Mock Version: 4.1 Mock Version: 4.1 Mock Version: 4.1 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-jsonpath-rw.spec'], chrootPath='/var/lib/mock/dist-ocs23-build-67892-3705/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=981gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-jsonpath-rw.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Wrote: /builddir/build/SRPMS/python-jsonpath-rw-1.4.0-3.ocs23.src.rpm Child return code was: 0 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-jsonpath-rw.spec'], chrootPath='/var/lib/mock/dist-ocs23-build-67892-3705/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=981gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-jsonpath-rw.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.NZ7qhN + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf python-jsonpath-rw-1.4.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/python-jsonpath-rw-1.4.0.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd python-jsonpath-rw-1.4.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . 
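(Aside for readers unfamiliar with mock's ENTER ['do_with_status'] entries: each one records a subprocess invocation inside the buildroot. Below is a minimal Python sketch of the equivalent call, using only the command, environment and timeout shown in the log entry above; the spec path exists only inside the mock chroot, so this is illustrative rather than something to run as-is.)

    import subprocess

    # Environment copied from the log entry above (PROMPT_COMMAND/PS1 omitted for brevity).
    env = {
        "TERM": "vt100", "SHELL": "/bin/bash", "HOME": "/builddir", "HOSTNAME": "mock",
        "PATH": "/usr/bin:/bin:/usr/sbin:/sbin", "LANG": "C.UTF-8",
    }
    # The same command mock hands to bash for the SRPM pass (-bs); the binary pass (-bb) differs only in that flag.
    cmd = ["bash", "--login", "-c",
           "/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps "
           "/builddir/build/SPECS/python-jsonpath-rw.spec"]
    result = subprocess.run(cmd, env=env, capture_output=True, text=True, timeout=86400)
    print("Child return code was:", result.returncode)  # the log above shows 0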
+ RPM_EC=0 ++ jobs -p + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.ScXy0I + umask 022 + cd /builddir/build/BUILD + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd python-jsonpath-rw-1.4.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + /usr/bin/python3 setup.py build '--executable=/usr/bin/python3 -s' running build running build_py creating build creating build/lib creating build/lib/jsonpath_rw copying jsonpath_rw/jsonpath.py -> build/lib/jsonpath_rw copying jsonpath_rw/parser.py -> build/lib/jsonpath_rw copying jsonpath_rw/lexer.py -> build/lib/jsonpath_rw copying jsonpath_rw/__init__.py -> build/lib/jsonpath_rw creating build/lib/jsonpath_rw/bin copying jsonpath_rw/bin/jsonpath.py -> build/lib/jsonpath_rw/bin copying jsonpath_rw/bin/__init__.py -> build/lib/jsonpath_rw/bin + RPM_EC=0 ++ jobs -p + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.aGJNHh + umask 022 + cd /builddir/build/BUILD + '[' /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch '!=' / ']' + rm -rf /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch ++ dirname /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch + mkdir -p /builddir/build/BUILDROOT + mkdir /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch + cd python-jsonpath-rw-1.4.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS 
-specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + /usr/bin/python3 setup.py install -O1 --skip-build --root /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch running install /usr/lib/python3.10/site-packages/setuptools/command/install.py:34: SetuptoolsDeprecationWarning: setup.py install is deprecated. Use build and pip and other standards-based tools. warnings.warn( running install_lib creating /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr creating /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib creating /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10 creating /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages creating /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw copying build/lib/jsonpath_rw/jsonpath.py -> /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw copying build/lib/jsonpath_rw/parser.py -> /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw copying build/lib/jsonpath_rw/lexer.py -> /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw creating /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/bin copying build/lib/jsonpath_rw/bin/jsonpath.py -> /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/bin copying build/lib/jsonpath_rw/bin/__init__.py -> /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/bin copying build/lib/jsonpath_rw/__init__.py -> /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw byte-compiling /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/jsonpath.py to jsonpath.cpython-310.pyc byte-compiling /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/parser.py to parser.cpython-310.pyc byte-compiling /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/lexer.py to lexer.cpython-310.pyc byte-compiling /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/bin/jsonpath.py to jsonpath.cpython-310.pyc byte-compiling /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/bin/__init__.py to __init__.cpython-310.pyc byte-compiling /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw/__init__.py to __init__.cpython-310.pyc writing byte-compilation script '/tmp/tmpg95nl3jh.py' /usr/bin/python3 /tmp/tmpg95nl3jh.py removing /tmp/tmpg95nl3jh.py running install_egg_info running egg_info creating jsonpath_rw.egg-info writing jsonpath_rw.egg-info/PKG-INFO writing dependency_links to jsonpath_rw.egg-info/dependency_links.txt writing entry points to 
jsonpath_rw.egg-info/entry_points.txt writing requirements to jsonpath_rw.egg-info/requires.txt writing top-level names to jsonpath_rw.egg-info/top_level.txt writing manifest file 'jsonpath_rw.egg-info/SOURCES.txt' reading manifest file 'jsonpath_rw.egg-info/SOURCES.txt' adding license file 'LICENSE' writing manifest file 'jsonpath_rw.egg-info/SOURCES.txt' Copying jsonpath_rw.egg-info to /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages/jsonpath_rw-1.4.0-py3.10.egg-info running install_scripts Installing jsonpath.py script to /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/bin + rm -rfv /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/bin/__pycache__ + /usr/bin/find-debuginfo -j8 --strict-build-id -m -i --build-id-seed 1.4.0-3.ocs23 --unique-debug-suffix -1.4.0-3.ocs23.noarch --unique-debug-src-base python-jsonpath-rw-1.4.0-3.ocs23.noarch -S debugsourcefiles.list /builddir/build/BUILD/python-jsonpath-rw-1.4.0 find: 'debug': No such file or directory + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/OpenCloudOS/brp-ldconfig + /usr/lib/rpm/brp-compress + /usr/lib/rpm/OpenCloudOS/brp-strip-lto /usr/bin/strip + /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip + /usr/lib/rpm/check-rpaths + /usr/lib/rpm/OpenCloudOS/brp-mangle-shebangs grep: Perl matching not supported in a --disable-perl-regexp build + /usr/lib/rpm/OpenCloudOS/brp-python-bytecompile '' 1 0 Bytecompiling .py files below /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10 using python3.10 + /usr/lib/rpm/OpenCloudOS/brp-python-hardlink Executing(%check): /bin/sh -e /var/tmp/rpm-tmp.swZmdq + umask 022 + cd /builddir/build/BUILD + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -mbranch-protection=standard -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd python-jsonpath-rw-1.4.0 + 
PYTHONPATH=/builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/lib/python3.10/site-packages + /usr/bin/python3 setup.py test running test WARNING: Testing via this command is deprecated and will be removed in a future version. Users looking for a generic test entry point independent of test runner are encouraged to use tox. running egg_info writing jsonpath_rw.egg-info/PKG-INFO writing dependency_links to jsonpath_rw.egg-info/dependency_links.txt writing entry points to jsonpath_rw.egg-info/entry_points.txt writing requirements to jsonpath_rw.egg-info/requires.txt writing top-level names to jsonpath_rw.egg-info/top_level.txt reading manifest file 'jsonpath_rw.egg-info/SOURCES.txt' adding license file 'LICENSE' writing manifest file 'jsonpath_rw.egg-info/SOURCES.txt' running build_ext test_basic_errors (tests.test_lexer.TestLexer) ... lex: tokens = ['DOUBLEDOT', 'NUMBER', 'ID', 'NAMED_OPERATOR', 'WHERE'] lex: literals = ['*', '.', '[', ']', '(', ')', '$', ',', ':', '|', '&'] lex: states = {'INITIAL': 'inclusive', 'singlequote': 'exclusive', 'doublequote': 'exclusive', 'backquote': 'exclusive'} lex: Adding rule t_ID -> '[a-zA-Z_@][a-zA-Z0-9_@\-]*' (state 'INITIAL') lex: Adding rule t_NUMBER -> '-?\d+' (state 'INITIAL') lex: Adding rule t_singlequote -> ''' (state 'INITIAL') lex: Adding rule t_doublequote -> '"' (state 'INITIAL') lex: Adding rule t_backquote -> '`' (state 'INITIAL') lex: Adding rule t_newline -> '\n' (state 'INITIAL') lex: Adding rule t_DOUBLEDOT -> '\.\.' (state 'INITIAL') lex: Adding rule t_singlequote_content -> '[^'\\]+' (state 'singlequote') lex: Adding rule t_singlequote_escape -> '\\.' (state 'singlequote') lex: Adding rule t_singlequote_end -> ''' (state 'singlequote') lex: Adding rule t_doublequote_content -> '[^"\\]+' (state 'doublequote') lex: Adding rule t_doublequote_escape -> '\\.' (state 'doublequote') lex: Adding rule t_doublequote_end -> '"' (state 'doublequote') lex: Adding rule t_backquote_escape -> '\\.' (state 'backquote') lex: Adding rule t_backquote_content -> '[^`\\]+' (state 'backquote') lex: Adding rule t_backquote_end -> '`' (state 'backquote') lex: ==== MASTER REGEXS FOLLOW ==== lex: state 'INITIAL' : regex[0] = '(?P<t_ID>[a-zA-Z_@][a-zA-Z0-9_@\-]*)|(?P<t_NUMBER>-?\d+)|(?P<t_singlequote>')|(?P<t_doublequote>")|(?P<t_backquote>`)|(?P<t_newline>\n)|(?P<t_DOUBLEDOT>\.\.)' lex: state 'singlequote' : regex[0] = '(?P<t_singlequote_content>[^'\\]+)|(?P<t_singlequote_escape>\\.)|(?P<t_singlequote_end>')' lex: state 'doublequote' : regex[0] = '(?P<t_doublequote_content>[^"\\]+)|(?P<t_doublequote_escape>\\.)|(?P<t_doublequote_end>")' lex: state 'backquote' : regex[0] = '(?P<t_backquote_escape>\\.)|(?P<t_backquote_content>[^`\\]+)|(?P<t_backquote_end>`)'
ok test_simple_inputs (tests.test_lexer.TestLexer) ...
ok test_DatumInContext_in_context (tests.test_jsonpath.TestDatumInContext) ... ok test_DatumInContext_init (tests.test_jsonpath.TestDatumInContext) ... ok test_child_auto_id (tests.test_jsonpath.TestJsonPath) ... ok test_child_paths (tests.test_jsonpath.TestJsonPath) ... ok test_child_value (tests.test_jsonpath.TestJsonPath) ... ok test_descendants_auto_id (tests.test_jsonpath.TestJsonPath) ... ok test_descendants_paths (tests.test_jsonpath.TestJsonPath) ... ok test_descendants_value (tests.test_jsonpath.TestJsonPath) ... ok test_fields_auto_id (tests.test_jsonpath.TestJsonPath) ... ok test_fields_paths (tests.test_jsonpath.TestJsonPath) ... ok test_fields_value (tests.test_jsonpath.TestJsonPath) ... ok test_hyphen_key (tests.test_jsonpath.TestJsonPath) ... ok test_index_auto_id (tests.test_jsonpath.TestJsonPath) ... ok test_index_paths (tests.test_jsonpath.TestJsonPath) ... ok test_index_value (tests.test_jsonpath.TestJsonPath) ... ok test_parent_value (tests.test_jsonpath.TestJsonPath) ... ok test_root_auto_id (tests.test_jsonpath.TestJsonPath) ... ok test_root_paths (tests.test_jsonpath.TestJsonPath) ... ok test_root_value (tests.test_jsonpath.TestJsonPath) ... ok test_slice_auto_id (tests.test_jsonpath.TestJsonPath) ... ok test_slice_paths (tests.test_jsonpath.TestJsonPath) ... ok test_slice_value (tests.test_jsonpath.TestJsonPath) ... ok test_this_auto_id (tests.test_jsonpath.TestJsonPath) ...
ok test_this_paths (tests.test_jsonpath.TestJsonPath) ... ok test_this_value (tests.test_jsonpath.TestJsonPath) ... ok test_atomic (tests.test_parser.TestParser) ... /builddir/build/BUILD/python-jsonpath-rw-1.4.0/jsonpath_rw/parser.py:47: ResourceWarning: unclosed file <_io.TextIOWrapper name='/builddir/build/BUILD/python-jsonpath-rw-1.4.0/jsonpath_rw/parser.out' mode='w' encoding='UTF-8'> new_parser = ply.yacc.yacc(module=self, ResourceWarning: Enable tracemalloc to get the object allocation traceback ok test_nested (tests.test_parser.TestParser) ... ok test_filename_mode (tests.bin.test_jsonpath.TestJsonPathScript) ... ok test_stdin_mode (tests.bin.test_jsonpath.TestJsonPathScript) ... ok ---------------------------------------------------------------------- Ran 31 tests in 1.010s OK LexToken($,'$',1,0) LexToken($,'$',-1,-1) LexToken(ID,'hello',1,6) LexToken(ID,'hello',-1,-1) LexToken(ID,'goodbye',1,8) LexToken(ID,'goodbye',-1,-1) LexToken(ID,'doublequote"',1,13) LexToken(ID,'doublequote"',-1,-1) LexToken(ID,'doublequote"',1,14) LexToken(ID,'doublequote"',-1,-1) LexToken(ID,"singlequote'",1,14) LexToken(ID,"singlequote'",-1,-1) LexToken(ID,"singlequote'",1,13) LexToken(ID,"singlequote'",-1,-1) LexToken(ID,'fuzz',1,0) LexToken(ID,'fuzz',-1,-1) LexToken(NUMBER,1,1,0) LexToken(NUMBER,1,-1,-1) LexToken(NUMBER,45,1,0) LexToken(NUMBER,45,-1,-1) LexToken(NUMBER,-1,1,0) LexToken(NUMBER,-1,-1,-1) LexToken(NUMBER,-13,1,1) LexToken(NUMBER,-13,-1,-1) LexToken(ID,'fuzz.bang',1,10) LexToken(ID,'fuzz.bang',-1,-1) LexToken(ID,'fuzz',1,0) LexToken(ID,'fuzz',-1,-1) LexToken(.,'.',1,4) LexToken(.,'.',-1,-1) LexToken(ID,'bang',1,5) LexToken(ID,'bang',-1,-1) LexToken(ID,'fuzz',1,0) LexToken(ID,'fuzz',-1,-1) LexToken(.,'.',1,4) LexToken(.,'.',-1,-1) LexToken(*,'*',1,5) LexToken(*,'*',-1,-1) LexToken(ID,'fuzz',1,0) LexToken(ID,'fuzz',-1,-1) LexToken(DOUBLEDOT,'..',1,4) LexToken(DOUBLEDOT,'..',-1,-1) LexToken(ID,'bang',1,6) LexToken(ID,'bang',-1,-1) LexToken(&,'&',1,0) LexToken(&,'&',-1,-1) LexToken(ID,'@',1,0) LexToken(ID,'@',-1,-1) LexToken(NAMED_OPERATOR,'this',1,5) LexToken(NAMED_OPERATOR,'this',-1,-1) LexToken(|,'|',1,0) LexToken(|,'|',-1,-1) LexToken(WHERE,'where',1,0) LexToken(WHERE,'where',-1,-1) parse("foo.baz.id").find({'foo': {'baz': 3}}) =?= ['foo.baz'] parse("foo.baz.id").find({'foo': {'baz': [3]}}) =?= ['foo.baz'] parse("foo.baz.id").find({'foo': {'id': 'bizzle', 'baz': 3}}) =?= ['bizzle.baz'] parse("foo.baz.id").find({'foo': {'baz': {'id': 'hi'}}}) =?= ['foo.hi'] parse("foo.baz.bizzle.id").find({'foo': {'baz': {'bizzle': 5}}}) =?= ['foo.baz.bizzle'] parse("foo.baz").find({'foo': {'baz': 3}}).paths =?= ['foo.baz'] parse("foo.baz").find({'foo': {'baz': [3]}}).paths =?= ['foo.baz'] parse("foo.baz.bizzle").find({'foo': {'baz': {'bizzle': 5}}}).paths =?= ['foo.baz.bizzle'] parse("foo.baz").find({'foo': {'baz': 3}}) =?= [3] parse("foo.baz").find({'foo': {'baz': [3]}}) =?= [[3]] parse("foo.baz.bizzle").find({'foo': {'baz': {'bizzle': 5}}}) =?= [5] parse("foo..baz.id").find({'foo': {'baz': 1, 'bing': {'baz': 2}}}) =?= ['foo.baz', 'foo.bing.baz'] parse("foo..baz").find({'foo': {'baz': 1, 'bing': {'baz': 2}}}).paths =?= ['foo.baz', 'foo.bing.baz'] parse("foo..baz").find({'foo': {'baz': 1, 'bing': {'baz': 2}}}) =?= [1, 2] parse("foo..baz").find({'foo': [{'baz': 1}, {'baz': 2}]}) =?= [1, 2] parse("foo.id").find({'foo': 'baz'}) =?= ['foo'] parse("foo.id").find({'foo': {'id': 'baz'}}) =?= ['baz'] parse("foo,baz.id").find({'foo': 1, 'baz': 2}) =?= ['foo', 'baz'] parse("*.id").find({'foo': {'id': 1}, 'baz': 2}) 
=?= {'1', 'baz'} parse("foo").find({'foo': 'baz'}).paths =?= ['foo'] parse("foo,baz").find({'foo': 1, 'baz': 2}).paths =?= ['foo', 'baz'] parse("*").find({'foo': 1, 'baz': 2}).paths =?= {'foo', 'baz'} parse("*").find({'foo': 1, 'baz': 2}).paths =?= {'foo', 'id', 'baz'} parse("foo").find({'foo': 'baz'}) =?= ['baz'] parse("foo,baz").find({'foo': 1, 'baz': 2}) =?= [1, 2] parse("@foo").find({'@foo': 1}) =?= [1] parse("*").find({'foo': 1, 'baz': 2}) =?= {1, 2} parse("*").find({'foo': 1, 'baz': 2}) =?= {1, 2, '`this`'} parse("foo.bar-baz").find({'foo': {'bar-baz': 3}}) =?= [3] parse("foo.[bar-baz,blah-blah]").find({'foo': {'bar-baz': 3, 'blah-blah': 5}}) =?= [3, 5] parse("foo.-baz").find({'foo': {'-baz': 8}}) =?= [8] parse("[0].id").find([42]) =?= ['[0]'] parse("[2].id").find([34, 65, 29, 59]) =?= ['[2]'] parse("[0]").find([42]).paths =?= ['[0]'] parse("[2]").find([34, 65, 29, 59]).paths =?= ['[2]'] parse("[0]").find([42]) =?= [42] parse("[5]").find([42]) =?= [] parse("[2]").find([34, 65, 29, 59]) =?= [29] parse("foo.baz.`parent`").find({'foo': {'baz': 3}}) =?= [{'baz': 3}] parse("foo.`parent`.foo.baz.`parent`.baz.bizzle").find({'foo': {'baz': {'bizzle': 5}}}) =?= [5] parse("$.id").find({'foo': 'baz'}) =?= ['$'] parse("foo.$.id").find({'foo': 'baz', 'id': 'bizzle'}) =?= ['bizzle'] parse("foo.$.baz.id").find({'foo': 4, 'baz': 3}) =?= ['baz'] parse("$").find({'foo': 'baz'}).paths =?= ['$'] parse("foo.$").find({'foo': 'baz'}).paths =?= ['$'] parse("foo.$.foo").find({'foo': 'baz'}).paths =?= ['foo'] parse("$").find({'foo': 'baz'}) =?= [{'foo': 'baz'}] parse("foo.$").find({'foo': 'baz'}) =?= [{'foo': 'baz'}] parse("foo.$.foo").find({'foo': 'baz'}) =?= ['baz'] parse("[*].id").find([1, 2, 3]) =?= ['[0]', '[1]', '[2]'] parse("[1:].id").find([1, 2, 3, 4]) =?= ['[1]', '[2]', '[3]'] parse("[*]").find([1, 2, 3]).paths =?= ['[0]', '[1]', '[2]'] parse("[1:]").find([1, 2, 3, 4]).paths =?= ['[1]', '[2]', '[3]'] parse("[*]").find([1, 2, 3]) =?= [1, 2, 3] parse("[*]").find(range(1, 4)) =?= [1, 2, 3] parse("[1:]").find([1, 2, 3, 4]) =?= [2, 3, 4] parse("[:2]").find([1, 2, 3, 4]) =?= [1, 2] parse("[*]").find(1) =?= [1] parse("[0:]").find(1) =?= [1] parse("[*]").find({'foo': 1}) =?= [{'foo': 1}] parse("[*].foo").find({'foo': 1}) =?= [1] parse("id").find({'foo': 'baz'}) =?= ['`this`'] parse("foo.`this`.id").find({'foo': 'baz'}) =?= ['foo'] parse("foo.`this`.baz.id").find({'foo': {'baz': 3}}) =?= ['foo.baz'] parse("`this`").find({'foo': 'baz'}).paths =?= ['`this`'] parse("foo.`this`").find({'foo': 'baz'}).paths =?= ['foo'] parse("foo.`this`.baz").find({'foo': {'baz': 3}}).paths =?= ['foo.baz'] parse("`this`").find({'foo': 'baz'}) =?= [{'foo': 'baz'}] parse("foo.`this`").find({'foo': 'baz'}) =?= ['baz'] parse("foo.`this`.baz").find({'foo': {'baz': 3}}) =?= [3] foo =?= foo * =?= * baz,bizzle =?= baz,bizzle [1] =?= [1] [1:] =?= [1] [:] =?= [*] [*] =?= [*] [:2] =?= [:2] [1:2] =?= [1:2] [5:-2] =?= [5:-2] foo.baz =?= foo.baz foo.baz,bizzle =?= foo.baz,bizzle foo where baz =?= foo where baz foo..baz =?= foo..baz foo..baz.bing =?= foo..baz.bing + RPM_EC=0 ++ jobs -p + exit 0 Processing files: python3-jsonpath-rw-1.4.0-3.ocs23.noarch Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.CsN9w7 + umask 022 + cd /builddir/build/BUILD + cd python-jsonpath-rw-1.4.0 + DOCDIR=/builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/share/doc/python3-jsonpath-rw + export LC_ALL=C + LC_ALL=C + export DOCDIR + /usr/bin/mkdir -p /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/share/doc/python3-jsonpath-rw + cp 
-pr README.rst /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/share/doc/python3-jsonpath-rw + RPM_EC=0 ++ jobs -p + exit 0 Executing(%license): /bin/sh -e /var/tmp/rpm-tmp.MHuR8l + umask 022 + cd /builddir/build/BUILD + cd python-jsonpath-rw-1.4.0 + LICENSEDIR=/builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/share/licenses/python3-jsonpath-rw + export LC_ALL=C + LC_ALL=C + export LICENSEDIR + /usr/bin/mkdir -p /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/share/licenses/python3-jsonpath-rw + cp -pr LICENSE /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch/usr/share/licenses/python3-jsonpath-rw + RPM_EC=0 ++ jobs -p + exit 0 Provides: python-jsonpath-rw = 1.4.0-3.ocs23 python3-jsonpath-rw = 1.4.0-3.ocs23 python3.10-jsonpath-rw = 1.4.0-3.ocs23 python3.10dist(jsonpath-rw) = 1.4 python3dist(jsonpath-rw) = 1.4 Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PartialHardlinkSets) <= 4.0.4-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1 Requires: /usr/bin/python3 python(abi) = 3.10 python3.10dist(decorator) python3.10dist(ply) python3.10dist(six) Obsoletes: python3.10-jsonpath-rw < 1.4.0-3.ocs23 Checking for unpackaged file(s): /usr/lib/rpm/check-files /builddir/build/BUILDROOT/python-jsonpath-rw-1.4.0-3.ocs23.noarch Wrote: /builddir/build/RPMS/python3-jsonpath-rw-1.4.0-3.ocs23.noarch.rpm Child return code was: 0
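The "parse(...) =?= ..." lines earlier in the log are the package's own self-checks printed while the test suite runs. As a rough usage sketch against the freshly built python3-jsonpath-rw package (the data literals below are copied from a few of those logged cases; variable names are illustrative):

    from jsonpath_rw import jsonpath, parse

    # Child access: matched values and the paths they were found at.
    matches = parse('foo.baz').find({'foo': {'baz': 3}})
    print([m.value for m in matches])            # [3]
    print([str(m.full_path) for m in matches])   # ['foo.baz']

    # Recursive descent, as in the logged "foo..baz" cases.
    print([m.value for m in parse('foo..baz').find({'foo': {'baz': 1, 'bing': {'baz': 2}}})])  # [1, 2]

    # Slices over lists, as in the logged "[1:]" cases.
    print([m.value for m in parse('[1:]').find([1, 2, 3, 4])])  # [2, 3, 4]

    # The ".id" cases rely on the auto_id feature, which the test suite enables like this:
    jsonpath.auto_id_field = 'id'
    print([m.value for m in parse('foo.baz.id').find({'foo': {'baz': 3}})])  # ['foo.baz']

The expected values in the comments mirror the "=?=" expectations printed in the log above; the authoritative assertions live in the package's unittest modules (tests.test_jsonpath, tests.test_parser, tests.bin.test_jsonpath) that produced the "Ran 31 tests" summary.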