Python 2 and 3 compatibility

With Python 2 (slowly) approaching EOL and its removal from distributions
already being planned (Fedora), the existing Python 2 code needs to be
transitioned to Python 3.  This patch stack updates the Python code to
be compatible with Python 2.7, 3.4, 3.5, 3.6, and 3.7.

Reviewed-by: John Ramsden <johnramsden@riseup.net>
Reviewed-by: Neal Gompa <ngompa@datto.com>
Reviewed-by: loli10K <ezomori.nozomu@gmail.com>
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: John Wren Kennedy <john.kennedy@delphix.com>
Reviewed-by: Antonio Russo <antonio.e.russo@gmail.com>
Closes #8096
commit c87db59196
Brian Behlendorf, 2019-01-06 10:41:16 -08:00
45 changed files with 1596 additions and 1397 deletions
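
The changes follow a handful of recurring 2/3-compatibility idioms: a
__future__ import in every module, bytes literals for data exchanged with the
C libraries through CFFI, and dict/iterator handling that avoids Python 2-only
methods such as iteritems(). A minimal illustrative sketch of those idioms
(hypothetical names, not code taken from the diff):

from __future__ import absolute_import, division, print_function

def summarize(holds):
    # Plain dict iteration instead of iteritems()/itervalues(), which are gone in Python 3.
    for snap in holds:
        hold_list = holds[snap]
        print(snap, hold_list)

# Bytes literals for data handed to the C libraries via CFFI nvlists.
key = b""                              # instead of bytes(""), which fails on Python 3
pool = b"tank/fs@snap".split(b"@")[0]  # b"tank/fs"

# Materialize dict views where a real list is expected on both versions.
names = list({b"snap1": 0, b"snap2": 0}.keys())

summarize({b"tank@snap": [b"hold1"]})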

.gitignore

@ -22,6 +22,8 @@
*.swp
*.gcno
*.gcda
*.pyc
*.pyo
.deps
.libs
.dirstamp

View File

@ -1 +1,13 @@
dist_bin_SCRIPTS = arc_summary.py arc_summary3.py
EXTRA_DIST = arc_summary2 arc_summary3
if USING_PYTHON_2
dist_bin_SCRIPTS = arc_summary2
install-exec-hook:
mv $(DESTDIR)$(bindir)/arc_summary2 $(DESTDIR)$(bindir)/arc_summary
endif
if USING_PYTHON_3
dist_bin_SCRIPTS = arc_summary3
install-exec-hook:
mv $(DESTDIR)$(bindir)/arc_summary3 $(DESTDIR)$(bindir)/arc_summary
endif

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
#
# $Id: arc_summary.pl,v 388:e27800740aa2 2011-07-08 02:53:29Z jhell $
#
@ -35,6 +35,8 @@
# Note some of this code uses older code (eg getopt instead of argparse,
# subprocess.Popen() instead of subprocess.run()) because we need to support
# some very old versions of Python.
#
"""Print statistics on the ZFS Adjustable Replacement Cache (ARC)
Provides basic information on the ARC, its efficiency, the L2ARC (if present),
@ -1005,7 +1007,7 @@ def zfs_header():
def usage():
"""Print usage information"""
sys.stdout.write("Usage: arc_summary.py [-h] [-a] [-d] [-p PAGE]\n\n")
sys.stdout.write("Usage: arc_summary [-h] [-a] [-d] [-p PAGE]\n\n")
sys.stdout.write("\t -h, --help : "
"Print this help message and exit\n")
sys.stdout.write("\t -a, --alternate : "
@ -1018,10 +1020,10 @@ def usage():
"should be an integer between 1 and " +
str(len(unSub)) + "\n\n")
sys.stdout.write("Examples:\n")
sys.stdout.write("\tarc_summary.py -a\n")
sys.stdout.write("\tarc_summary.py -p 4\n")
sys.stdout.write("\tarc_summary.py -ad\n")
sys.stdout.write("\tarc_summary.py --page=2\n")
sys.stdout.write("\tarc_summary -a\n")
sys.stdout.write("\tarc_summary -p 4\n")
sys.stdout.write("\tarc_summary -ad\n")
sys.stdout.write("\tarc_summary --page=2\n")
def main():

View File

@ -346,7 +346,7 @@ def get_version(request):
error_msg = '(ERROR: "{0}" requested)'.format(request)
return error_msg
# The original arc_summary.py called /sbin/modinfo/{spl,zfs} to get
# The original arc_summary called /sbin/modinfo/{spl,zfs} to get
# the version information. We switch to /sys/module/{spl,zfs}/version
# to make sure we get what is really loaded in the kernel
command = ["cat", "/sys/module/{0}/version".format(request)]
@ -374,7 +374,7 @@ def print_header():
"""
# datetime is now recommended over time but we keep the exact formatting
# from the older version of arc_summary.py in case there are scripts
# from the older version of arc_summary in case there are scripts
# that expect it in this way
daydate = time.strftime(DATE_FORMAT)
spc_date = LINE_LENGTH-len(daydate)
@ -586,7 +586,7 @@ def section_archits(kstats_dict):
# For some reason, anon_hits can turn negative, which is weird. Until we
# have figured out why this happens, we just hide the problem, following
# the behavior of the original arc_summary.py
# the behavior of the original arc_summary.
if anon_hits >= 0:
prt_i2('Anonymously used:',
f_perc(anon_hits, arc_stats['hits']), f_hits(anon_hits))

View File

@ -1 +1,13 @@
dist_bin_SCRIPTS = arcstat.py
dist_bin_SCRIPTS = arcstat
#
# The arcstat script is compatible with both Python 2.6 and 3.4.
# As such the python 3 shebang can be replaced at install time when
# targeting a python 2 system. This allows us to maintain a single
# version of the source.
#
if USING_PYTHON_2
install-exec-hook:
sed --in-place 's|^#!/usr/bin/python3|#!/usr/bin/python2|' \
$(DESTDIR)$(bindir)/arcstat
endif

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3
#
# Print out ZFS ARC Statistics exported via kstat(1)
# For a definition of fields, or usage, use arcstat.pl -v
@ -42,7 +42,8 @@
# @hdr is the array of fields that needs to be printed, so we
# just iterate over this array and print the values using our pretty printer.
#
# This script must remain compatible with Python 2.6+ and Python 3.4+.
#
import sys
import time
@ -109,7 +110,7 @@ opfile = None
sep = " " # Default separator is 2 spaces
version = "0.4"
l2exist = False
cmd = ("Usage: arcstat.py [-hvx] [-f fields] [-o file] [-s string] [interval "
cmd = ("Usage: arcstat [-hvx] [-f fields] [-o file] [-s string] [interval "
"[count]]\n")
cur = {}
d = {}
@ -138,10 +139,10 @@ def usage():
sys.stderr.write("\t -s : Override default field separator with custom "
"character or string\n")
sys.stderr.write("\nExamples:\n")
sys.stderr.write("\tarcstat.py -o /tmp/a.log 2 10\n")
sys.stderr.write("\tarcstat.py -s \",\" -o /tmp/a.log 2 10\n")
sys.stderr.write("\tarcstat.py -v\n")
sys.stderr.write("\tarcstat.py -f time,hit%,dh%,ph%,mh% 1\n")
sys.stderr.write("\tarcstat -o /tmp/a.log 2 10\n")
sys.stderr.write("\tarcstat -s \",\" -o /tmp/a.log 2 10\n")
sys.stderr.write("\tarcstat -v\n")
sys.stderr.write("\tarcstat -f time,hit%,dh%,ph%,mh% 1\n")
sys.stderr.write("\n")
sys.exit(1)

View File

@ -1 +1,13 @@
dist_bin_SCRIPTS = dbufstat.py
dist_bin_SCRIPTS = dbufstat
#
# The dbufstat script is compatible with both Python 2.6 and 3.4.
# As such the python 3 shebang can be replaced at install time when
# targeting a python 2 system. This allows us to maintain a single
# version of the source.
#
if USING_PYTHON_2
install-exec-hook:
sed --in-place 's|^#!/usr/bin/python3|#!/usr/bin/python2|' \
$(DESTDIR)$(bindir)/dbufstat
endif

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3
#
# Print out statistics for all cached dmu buffers. This information
# is available through the dbufs kstat and may be post-processed as
@ -27,6 +27,8 @@
# Copyright (C) 2013 Lawrence Livermore National Security, LLC.
# Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
#
# This script must remain compatible with Python 2.6+ and Python 3.4+.
#
import sys
import getopt
@ -106,7 +108,7 @@ cols = {
hdr = None
xhdr = None
sep = " " # Default separator is 2 spaces
cmd = ("Usage: dbufstat.py [-bdhnrtvx] [-i file] [-f fields] [-o file] "
cmd = ("Usage: dbufstat [-bdhnrtvx] [-i file] [-f fields] [-o file] "
"[-s string] [-F filter]\n")
raw = 0
@ -167,11 +169,11 @@ def usage():
"character or string\n")
sys.stderr.write("\t -F : Filter output by value or regex\n")
sys.stderr.write("\nExamples:\n")
sys.stderr.write("\tdbufstat.py -d -o /tmp/d.log\n")
sys.stderr.write("\tdbufstat.py -t -s \",\" -o /tmp/t.log\n")
sys.stderr.write("\tdbufstat.py -v\n")
sys.stderr.write("\tdbufstat.py -d -f pool,object,objset,dsize,cached\n")
sys.stderr.write("\tdbufstat.py -bx -F dbc=1,objset=54,pool=testpool\n")
sys.stderr.write("\tdbufstat -d -o /tmp/d.log\n")
sys.stderr.write("\tdbufstat -t -s \",\" -o /tmp/t.log\n")
sys.stderr.write("\tdbufstat -v\n")
sys.stderr.write("\tdbufstat -d -f pool,object,objset,dsize,cached\n")
sys.stderr.write("\tdbufstat -bx -F dbc=1,objset=54,pool=testpool\n")
sys.stderr.write("\n")
sys.exit(1)

config/always-python.m4 (new file)

@ -0,0 +1,102 @@
dnl #
dnl # ZFS_AC_PYTHON_VERSION(version, [action-if-true], [action-if-false])
dnl #
dnl # Verify Python version
dnl #
AC_DEFUN([ZFS_AC_PYTHON_VERSION], [
ver_check=`$PYTHON -c "import sys; print (sys.version.split()[[0]] $1)"`
AS_IF([test "$ver_check" = "True"], [
m4_ifvaln([$2], [$2])
], [
m4_ifvaln([$3], [$3])
])
])
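
The ZFS_AC_PYTHON_VERSION check above shells out to the selected interpreter
and compares the version text; for a call such as
ZFS_AC_PYTHON_VERSION([>= '3.4']) the embedded one-liner is roughly equivalent
to the following sketch (simplified; configure only looks for the literal
output "True"):

import sys

# Rough equivalent of: $PYTHON -c "import sys; print (sys.version.split()[[0]] >= '3.4')"
# (the doubled brackets are m4 quoting for [0]); configure treats the printed
# string "True" as success.
print(sys.version.split()[0] >= '3.4')

Note this is a plain string comparison of the version text, which is enough to
distinguish the 2.6 and 3.4 minimums checked here.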
dnl #
dnl # ZFS_AC_PYTHON_MODULE(module_name, [action-if-true], [action-if-false])
dnl #
dnl # Checks for Python module. Freely inspired by AX_PYTHON_MODULE
dnl # https://www.gnu.org/software/autoconf-archive/ax_python_module.html
dnl # Required by ZFS_AC_CONFIG_ALWAYS_PYZFS.
dnl #
AC_DEFUN([ZFS_AC_PYTHON_MODULE], [
PYTHON_NAME=`basename $PYTHON`
AC_MSG_CHECKING([for $PYTHON_NAME module: $1])
AS_IF([$PYTHON -c "import $1" 2>/dev/null], [
AC_MSG_RESULT(yes)
m4_ifvaln([$2], [$2])
], [
AC_MSG_RESULT(no)
m4_ifvaln([$3], [$3])
])
])
dnl #
dnl # The majority of the python scripts are written to be compatible
dnl # with Python 2.6 and Python 3.4. Therefore, they may be installed
dnl # and used with either interpreter. This option is intended to
dnl # provide a method to specify the default system version, and
dnl # set the PYTHON environment variable accordingly.
dnl #
AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYTHON], [
AC_ARG_WITH([python],
AC_HELP_STRING([--with-python[=VERSION]],
[default system python version @<:@default=check@:>@]),
[with_python=$withval],
[with_python=check])
AS_CASE([$with_python],
[check],
[AS_IF([test -x /usr/bin/python3],
[PYTHON="python3"],
[AS_IF([test -x /usr/bin/python2],
[PYTHON="python2"],
[PYTHON=""]
)]
)],
[2*], [PYTHON="python${with_python}"],
[*python2*], [PYTHON="${with_python}"],
[3*], [PYTHON="python${with_python}"],
[*python3*], [PYTHON="${with_python}"],
[no], [PYTHON=""],
[AC_MSG_ERROR([Unknown --with-python value '$with_python'])]
)
AS_IF([$PYTHON --version >/dev/null 2>&1], [ /bin/true ], [
AC_MSG_ERROR([Cannot find $PYTHON in your system path])
])
AM_PATH_PYTHON([2.6], [], [:])
AM_CONDITIONAL([USING_PYTHON], [test "$PYTHON" != :])
AM_CONDITIONAL([USING_PYTHON_2], [test "${PYTHON_VERSION:0:2}" = "2."])
AM_CONDITIONAL([USING_PYTHON_3], [test "${PYTHON_VERSION:0:2}" = "3."])
dnl #
dnl # Minimum supported Python versions for utilities:
dnl # Python 2.6.x, or Python 3.4.x
dnl #
AS_IF([test "${PYTHON_VERSION:0:2}" = "2."], [
ZFS_AC_PYTHON_VERSION([>= '2.6'], [ /bin/true ],
[AC_MSG_ERROR("Python >= 2.6.x is not available")])
])
AS_IF([test "${PYTHON_VERSION:0:2}" = "3."], [
ZFS_AC_PYTHON_VERSION([>= '3.4'], [ /bin/true ],
[AC_MSG_ERROR("Python >= 3.4.x is not available")])
])
dnl #
dnl # Request that packages be built for a specific Python version.
dnl #
AS_IF([test $with_python != check], [
PYTHON_PKG_VERSION=`echo ${PYTHON} | tr -d 'a-zA-Z.'`
DEFINE_PYTHON_PKG_VERSION='--define "__use_python_pkg_version '${PYTHON_PKG_VERSION}'"'
DEFINE_PYTHON_VERSION='--define "__use_python '${PYTHON}'"'
], [
DEFINE_PYTHON_VERSION=''
DEFINE_PYTHON_PKG_VERSION=''
])
AC_SUBST(DEFINE_PYTHON_VERSION)
AC_SUBST(DEFINE_PYTHON_PKG_VERSION)
])

View File

@ -1,80 +1,44 @@
dnl #
dnl # ZFS_AC_PYTHON_MODULE(module_name, [action-if-true], [action-if-false])
dnl # Determines if pyzfs can be built; requires Python 2.7 or later.
dnl #
dnl # Checks for Python module. Freely inspired by AX_PYTHON_MODULE
dnl # https://www.gnu.org/software/autoconf-archive/ax_python_module.html
dnl #
AC_DEFUN([ZFS_AC_PYTHON_MODULE],[
PYTHON_NAME=`basename $PYTHON`
AC_MSG_CHECKING([for $PYTHON_NAME module: $1])
$PYTHON -c "import $1" 2>/dev/null
if test $? -eq 0;
then
AC_MSG_RESULT(yes)
m4_ifvaln([$2], [$2])
else
AC_MSG_RESULT(no)
m4_ifvaln([$3], [$3])
fi
])
dnl #
dnl # ZFS_AC_PYTHON_VERSION(version, [action-if-true], [action-if-false])
dnl #
dnl # Verify Python version
dnl #
AC_DEFUN([ZFS_AC_PYTHON_VERSION], [
AC_MSG_CHECKING([for a version of Python $1])
version_check=`$PYTHON -c "import sys; print (sys.version.split()[[0]] $1)"`
if test "$version_check" = "True";
then
AC_MSG_RESULT(yes)
m4_ifvaln([$2], [$2])
else
AC_MSG_RESULT(no)
m4_ifvaln([$3], [$3])
fi
])
AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [
PYTHON_REQUIRED_VERSION="<= '2.7.x'"
AC_ARG_ENABLE([pyzfs],
AC_HELP_STRING([--enable-pyzfs],
[install libzfs_core python bindings @<:@default=check@:>@]),
[enable_pyzfs=$enableval],
[enable_pyzfs=check])
AM_PATH_PYTHON([2.7], [], [
dnl #
dnl # Packages for pyzfs specifically enabled/disabled.
dnl #
AS_IF([test "x$enable_pyzfs" != xcheck], [
AS_IF([test "x$enable_pyzfs" = xyes], [
AC_MSG_ERROR("python >= 2.7 is not installed")
], [test ! "x$enable_pyzfs" = xno], [
enable_pyzfs=no
])
])
AM_CONDITIONAL([HAVE_PYTHON], [test "$PYTHON" != :])
dnl #
dnl # Python 2.7.x is supported, other versions (3.5) are not yet
dnl #
AS_IF([test "x$enable_pyzfs" = xcheck], [
ZFS_AC_PYTHON_VERSION([$PYTHON_REQUIRED_VERSION], [], [
AS_IF([test "x$enable_pyzfs" = xyes], [
AC_MSG_ERROR("Python $PYTHON_REQUIRED_VERSION is not available")
], [test ! "x$enable_pyzfs" = xno], [
enable_pyzfs=no
])
DEFINE_PYZFS='--with pyzfs'
], [
DEFINE_PYZFS='--without pyzfs'
])
], [
DEFINE_PYZFS=''
])
AC_SUBST(DEFINE_PYZFS)
dnl #
dnl # Require python-devel libraries
dnl #
AS_IF([test "x$enable_pyzfs" = xcheck], [
AS_IF([test "x$enable_pyzfs" = xcheck -o "x$enable_pyzfs" = xyes], [
AS_IF([test "${PYTHON_VERSION:0:2}" = "2."], [
PYTHON_REQUIRED_VERSION=">= '2.7.0'"
], [
AS_IF([test "${PYTHON_VERSION:0:2}" = "3."], [
PYTHON_REQUIRED_VERSION=">= '3.4.0'"
], [
AC_MSG_ERROR("Python $PYTHON_VERSION unknown")
])
])
AX_PYTHON_DEVEL([$PYTHON_REQUIRED_VERSION], [
AS_IF([test "x$enable_pyzfs" = xyes], [
AC_MSG_ERROR("Python development library is not available")
AC_MSG_ERROR("Python $PYTHON_REQUIRED_VERSION development library is not installed")
], [test ! "x$enable_pyzfs" = xno], [
enable_pyzfs=no
])
@ -84,10 +48,10 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [
dnl #
dnl # Python "setuptools" module is required to build and install pyzfs
dnl #
AS_IF([test "x$enable_pyzfs" = xcheck], [
AS_IF([test "x$enable_pyzfs" = xcheck -o "x$enable_pyzfs" = xyes], [
ZFS_AC_PYTHON_MODULE([setuptools], [], [
AS_IF([test "x$enable_pyzfs" = xyes], [
AC_MSG_ERROR("python-setuptools is not installed")
AC_MSG_ERROR("Python $PYTHON_VERSION setuptools is not installed")
], [test ! "x$enable_pyzfs" = xno], [
enable_pyzfs=no
])
@ -97,10 +61,10 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [
dnl #
dnl # Python "cffi" module is required to run pyzfs
dnl #
AS_IF([test "x$enable_pyzfs" = xcheck], [
AS_IF([test "x$enable_pyzfs" = xcheck -o "x$enable_pyzfs" = xyes], [
ZFS_AC_PYTHON_MODULE([cffi], [], [
AS_IF([test "x$enable_pyzfs" = xyes], [
AC_MSG_ERROR("python-cffi is not installed")
AC_MSG_ERROR("Python $PYTHON_VERSION cffi is not installed")
], [test ! "x$enable_pyzfs" = xno], [
enable_pyzfs=no
])
@ -114,12 +78,8 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [
AM_CONDITIONAL([PYZFS_ENABLED], [test x$enable_pyzfs = xyes])
AC_SUBST([PYZFS_ENABLED], [$enable_pyzfs])
AS_IF([test "x$enable_pyzfs" = xyes], [
DEFINE_PYZFS='--define "_pyzfs 1"'
],[
DEFINE_PYZFS=''
])
AC_SUBST(DEFINE_PYZFS)
AC_SUBST(pythonsitedir, [$PYTHON_SITE_PKG])
AC_MSG_CHECKING([whether to enable pyzfs: ])
AC_MSG_RESULT($enable_pyzfs)
])

View File

@ -47,7 +47,7 @@ deb-utils: deb-local rpm-utils
pkg7=$${name}-test-$${version}.$${arch}.rpm; \
pkg8=$${name}-dracut-$${version}.$${arch}.rpm; \
pkg9=$${name}-initramfs-$${version}.$${arch}.rpm; \
pkg10=pyzfs-$${version}.noarch.rpm; \
pkg10=`ls python*-pyzfs-$${version}* | tail -1`; \
## Arguments need to be passed to dh_shlibdeps. Alien provides no mechanism
## to do this, so we install a shim onto the path which calls the real
## dh_shlibdeps with the required arguments.

View File

@ -160,6 +160,7 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS], [
ZFS_AC_CONFIG_ALWAYS_CC_ASAN
ZFS_AC_CONFIG_ALWAYS_TOOLCHAIN_SIMD
ZFS_AC_CONFIG_ALWAYS_ARCH
ZFS_AC_CONFIG_ALWAYS_PYTHON
ZFS_AC_CONFIG_ALWAYS_PYZFS
])
@ -264,10 +265,13 @@ AC_DEFUN([ZFS_AC_RPM], [
RPM_DEFINE_UTIL+=' $(DEFINE_INITRAMFS)'
RPM_DEFINE_UTIL+=' $(DEFINE_SYSTEMD)'
RPM_DEFINE_UTIL+=' $(DEFINE_PYZFS)'
RPM_DEFINE_UTIL+=' $(DEFINE_PYTHON_VERSION)'
RPM_DEFINE_UTIL+=' $(DEFINE_PYTHON_PKG_VERSION)'
dnl # Override default lib directory on Debian/Ubuntu systems. The provided
dnl # /usr/lib/rpm/platform/<arch>/macros files do not specify the correct
dnl # path for multiarch systems as described by the packaging guidelines.
dnl # Override default lib directory on Debian/Ubuntu systems. The
dnl # provided /usr/lib/rpm/platform/<arch>/macros files do not
dnl # specify the correct path for multiarch systems as described
dnl # by the packaging guidelines.
dnl #
dnl # https://wiki.ubuntu.com/MultiarchSpec
dnl # https://wiki.debian.org/Multiarch/Implementation

View File

@ -27,7 +27,7 @@ install-exec-local:
$(PYTHON) $(srcdir)/setup.py install \
--prefix $(prefix) \
--root $(DESTDIR)/ \
--install-lib $(pythondir) \
--install-lib $(pythonsitedir) \
--single-version-externally-managed \
--verbose

View File

@ -38,6 +38,7 @@ please visit its `GitHub repository <https://github.com/zfsonlinux/zfs>`_.
Maximum length of any ZFS name.
'''
from __future__ import absolute_import, division, print_function
from ._constants import (
MAXNAMELEN,

View File

@ -18,10 +18,12 @@
Important `libzfs_core` constants.
"""
from __future__ import absolute_import, division, print_function
# https://stackoverflow.com/a/1695250
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
enums = dict(((b, a) for a, b in enumerate(sequential)), **named)
return type('Enum', (), enums)
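
Given the enum() helper defined above, which maps the positional names to
consecutive integers and merges any keyword overrides, a hypothetical usage
(not taken from this diff) looks like:

Errors = enum('OK', 'FAILED', UNKNOWN=255)
assert Errors.OK == 0 and Errors.FAILED == 1 and Errors.UNKNOWN == 255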

View File

@ -26,6 +26,7 @@ corresponding interface functions.
The parameters and exceptions are documented in the `libzfs_core` interfaces.
"""
from __future__ import absolute_import, division, print_function
import errno
import re
@ -102,8 +103,9 @@ def lzc_snapshot_translate_errors(ret, errlist, snaps, props):
def _map(ret, name):
if ret == errno.EXDEV:
pool_names = map(_pool_name, snaps)
same_pool = all(x == pool_names[0] for x in pool_names)
pool_names = iter(map(_pool_name, snaps))
pool_name = next(pool_names, None)
same_pool = all(x == pool_name for x in pool_names)
if same_pool:
return lzc_exc.DuplicateSnapshots(name)
else:
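
This rewrite is needed because map() returns a one-shot iterator on Python 3,
whereas Python 2 returned an indexable list; a minimal self-contained
illustration of the new pattern (the lambda stands in for _pool_name):

snaps = [b'tank/fs@a', b'rpool/fs@b']
pool_names = map(lambda s: s.split(b'/')[0], snaps)

# Python 2: map() returned a list, so pool_names[0] worked and all() could
# re-read it.  Python 3: map() returns a one-shot iterator, so take the first
# element explicitly and let all() consume the rest.
pool_names = iter(pool_names)
pool_name = next(pool_names, None)
same_pool = all(x == pool_name for x in pool_names)
print(same_pool)   # False: the two snapshots live in different pools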
@ -270,7 +272,8 @@ def lzc_hold_translate_errors(ret, errlist, holds, fd):
def lzc_release_translate_errors(ret, errlist, holds):
if ret == 0:
return
for _, hold_list in holds.iteritems():
for snap in holds:
hold_list = holds[snap]
if not isinstance(hold_list, list):
raise lzc_exc.TypeError('holds must be in a list')
@ -705,15 +708,17 @@ def _handle_err_list(ret, errlist, names, exception, mapper):
if len(errlist) == 0:
suppressed_count = 0
names = list(zip(names, range(2)))
if len(names) == 1:
name = names[0]
name, _ = names[0]
else:
name = None
errors = [mapper(ret, name)]
else:
errors = []
suppressed_count = errlist.pop('N_MORE_ERRORS', 0)
for name, err in errlist.iteritems():
for name in errlist:
err = errlist[name]
errors.append(mapper(err, name))
raise exception(errors, suppressed_count)
@ -727,7 +732,7 @@ def _pool_name(name):
'@' separates a snapshot name from the rest of the dataset name.
'#' separates a bookmark name from the rest of the dataset name.
'''
return re.split('[/@#]', name, 1)[0]
return re.split(b'[/@#]', name, 1)[0]
def _fs_name(name):
@ -737,26 +742,26 @@ def _fs_name(name):
'@' separates a snapshot name from the rest of the dataset name.
'#' separates a bookmark name from the rest of the dataset name.
'''
return re.split('[@#]', name, 1)[0]
return re.split(b'[@#]', name, 1)[0]
def _is_valid_name_component(component):
allowed = string.ascii_letters + string.digits + '-_.: '
return component and all(x in allowed for x in component)
allowed = string.ascii_letters + string.digits + u'-_.: '
return component and all(x in allowed.encode() for x in component)
def _is_valid_fs_name(name):
return name and all(_is_valid_name_component(c) for c in name.split('/'))
return name and all(_is_valid_name_component(c) for c in name.split(b'/'))
def _is_valid_snap_name(name):
parts = name.split('@')
parts = name.split(b'@')
return (len(parts) == 2 and _is_valid_fs_name(parts[0]) and
_is_valid_name_component(parts[1]))
def _is_valid_bmark_name(name):
parts = name.split('#')
parts = name.split(b'#')
return (len(parts) == 2 and _is_valid_fs_name(parts[0]) and
_is_valid_name_component(parts[1]))
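
With dataset names now handled as bytes throughout, the helpers above split
and validate byte strings; a small self-contained sketch of the expected
behaviour (the function is re-stated here for illustration only):

import re

def _pool_name(name):
    # First component of a dataset name, before '/', '@' or '#' (bytes in, bytes out).
    return re.split(b'[/@#]', name, 1)[0]

assert _pool_name(b'tank/fs@snap') == b'tank'
assert _pool_name(b'tank#bookmark') == b'tank'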

View File

@ -26,6 +26,7 @@ increased convenience. Output parameters are not used and return values
are directly returned. Error conditions are signalled by exceptions
rather than by integer error codes.
"""
from __future__ import absolute_import, division, print_function
import errno
import functools
@ -112,7 +113,7 @@ def lzc_create(name, ds_type='zfs', props=None, key=None):
if props is None:
props = {}
if key is None:
key = bytes("")
key = b""
else:
key = bytes(key)
if ds_type == 'zfs':
@ -485,8 +486,8 @@ def lzc_hold(holds, fd=None):
errors.lzc_hold_translate_errors(ret, errlist, holds, fd)
# If there is no error (no exception raised by _handleErrList), but errlist
# is not empty, then it contains missing snapshots.
assert all(x == errno.ENOENT for x in errlist.itervalues())
return errlist.keys()
assert all(errlist[x] == errno.ENOENT for x in errlist)
return list(errlist.keys())
def lzc_release(holds):
@ -521,7 +522,8 @@ def lzc_release(holds):
'''
errlist = {}
holds_dict = {}
for snap, hold_list in holds.iteritems():
for snap in holds:
hold_list = holds[snap]
if not isinstance(hold_list, list):
raise TypeError('holds must be in a list')
holds_dict[snap] = {hold: None for hold in hold_list}
@ -531,8 +533,8 @@ def lzc_release(holds):
errors.lzc_release_translate_errors(ret, errlist, holds)
# If there is no error (no exception raised by _handleErrList), but errlist
# is not empty, then it contains missing snapshots and tags.
assert all(x == errno.ENOENT for x in errlist.itervalues())
return errlist.keys()
assert all(errlist[x] == errno.ENOENT for x in errlist)
return list(errlist.keys())
def lzc_get_holds(snapname):
@ -846,7 +848,7 @@ def lzc_change_key(fsname, crypt_cmd, props=None, key=None):
if props is None:
props = {}
if key is None:
key = bytes("")
key = b""
else:
key = bytes(key)
cmd = {
@ -929,13 +931,13 @@ def lzc_channel_program(
error.
'''
output = {}
params_nv = nvlist_in({"argv": params})
params_nv = nvlist_in({b"argv": params})
with nvlist_out(output) as outnvl:
ret = _lib.lzc_channel_program(
poolname, program, instrlimit, memlimit, params_nv, outnvl)
errors.lzc_channel_program_translate_error(
ret, poolname, output.get("error"))
return output.get("return")
ret, poolname, output.get(b"error"))
return output.get(b"return")
def lzc_channel_program_nosync(
@ -974,13 +976,13 @@ def lzc_channel_program_nosync(
error.
'''
output = {}
params_nv = nvlist_in({"argv": params})
params_nv = nvlist_in({b"argv": params})
with nvlist_out(output) as outnvl:
ret = _lib.lzc_channel_program_nosync(
poolname, program, instrlimit, memlimit, params_nv, outnvl)
errors.lzc_channel_program_translate_error(
ret, poolname, output.get("error"))
return output.get("return")
ret, poolname, output.get(b"error"))
return output.get(b"return")
def lzc_receive_resumable(
@ -1404,7 +1406,7 @@ def lzc_receive_with_cmdprops(
if cmdprops is None:
cmdprops = {}
if key is None:
key = bytes("")
key = b""
else:
key = bytes(key)
@ -1509,7 +1511,7 @@ def lzc_sync(poolname, force=False):
`innvl` has been replaced by the `force` boolean and `outnvl` has been
conveniently removed since it's not used.
'''
innvl = nvlist_in({"force": force})
innvl = nvlist_in({b"force": force})
with nvlist_out({}) as outnvl:
ret = _lib.lzc_sync(poolname, innvl, outnvl)
errors.lzc_sync_translate_error(ret, poolname)
@ -1873,9 +1875,9 @@ def lzc_get_props(name):
mountpoint_val = '/' + name
else:
mountpoint_val = None
result = {k: v['value'] for k, v in result.iteritems()}
result = {k: result[k]['value'] for k in result}
if 'clones' in result:
result['clones'] = result['clones'].keys()
result['clones'] = list(result['clones'].keys())
if mountpoint_val is not None:
result['mountpoint'] = mountpoint_val
return result

View File

@ -47,6 +47,7 @@ Format:
- a value can be a list of dictionaries that adhere to this format
- all elements of a list value must be of the same type
"""
from __future__ import absolute_import, division, print_function
import numbers
from collections import namedtuple
@ -159,10 +160,10 @@ def _type_info(typeid):
# only integer properties need to be here
_prop_name_to_type_str = {
"rewind-request": "uint32",
"type": "uint32",
"N_MORE_ERRORS": "int32",
"pool_context": "int32",
b"rewind-request": "uint32",
b"type": "uint32",
b"N_MORE_ERRORS": "int32",
b"pool_context": "int32",
}

View File

@ -19,6 +19,7 @@ The package that contains a module per each C library that
`libzfs_core` uses. The modules expose CFFI objects required
to make calls to functions in the libraries.
"""
from __future__ import absolute_import, division, print_function
import threading
import importlib
@ -47,7 +48,7 @@ def _setup_cffi():
ffi = FFI()
for module_name in MODULES:
module = importlib.import_module("." + module_name, __package__)
module = importlib.import_module("." + module_name, __name__)
ffi.cdef(module.CDEF)
lib = LazyLibrary(ffi, module.LIBRARY)
setattr(module, "ffi", ffi)

View File

@ -17,6 +17,7 @@
"""
Python bindings for ``libnvpair``.
"""
from __future__ import absolute_import, division, print_function
CDEF = """
typedef ... nvlist_t;

View File

@ -17,6 +17,7 @@
"""
Python bindings for ``libzfs_core``.
"""
from __future__ import absolute_import, division, print_function
CDEF = """

View File

@ -17,6 +17,7 @@
"""
Utility functions for casting to a specific C type.
"""
from __future__ import absolute_import, division, print_function
from .bindings.libnvpair import ffi as _ffi
@ -30,8 +31,8 @@ def _ffi_cast(type_name):
try:
type_info.elements[value]
except KeyError as e:
raise OverflowError('Invalid enum <%s> value %s' %
(type_info.cname, e.message))
raise OverflowError('Invalid enum <%s> value %s: %s' %
(type_info.cname, value, e))
else:
_ffi.new(type_name + '*', value)
return _ffi.cast(type_name, value)

View File

@ -17,6 +17,7 @@
"""
Exceptions that can be raised by libzfs_core operations.
"""
from __future__ import absolute_import, division, print_function
import errno
from ._constants import (

File diff suppressed because it is too large

View File

@ -21,6 +21,7 @@ and verify that no information is lost and value types are correct.
The tests also check that various error conditions like unsupported
value types or out of bounds values are detected.
"""
from __future__ import absolute_import, division, print_function
import unittest
@ -43,25 +44,25 @@ class TestNVList(unittest.TestCase):
def _assertIntDictsEqual(self, dict1, dict2):
self.assertEqual(
len(dict1), len(dict1),
"resulting dictionary is of different size")
b"resulting dictionary is of different size")
for key in dict1.keys():
self.assertEqual(int(dict1[key]), int(dict2[key]))
def _assertIntArrayDictsEqual(self, dict1, dict2):
self.assertEqual(
len(dict1), len(dict1),
"resulting dictionary is of different size")
b"resulting dictionary is of different size")
for key in dict1.keys():
val1 = dict1[key]
val2 = dict2[key]
self.assertEqual(
len(val1), len(val2), "array values of different sizes")
len(val1), len(val2), b"array values of different sizes")
for x, y in zip(val1, val2):
self.assertEqual(int(x), int(y))
def test_empty(self):
res = self._dict_to_nvlist_to_dict({})
self.assertEqual(len(res), 0, "expected empty dict")
self.assertEqual(len(res), 0, b"expected empty dict")
def test_invalid_key_type(self):
with self.assertRaises(TypeError):
@ -69,564 +70,564 @@ class TestNVList(unittest.TestCase):
def test_invalid_val_type__tuple(self):
with self.assertRaises(TypeError):
self._dict_to_nvlist_to_dict({"key": (1, 2)})
self._dict_to_nvlist_to_dict({b"key": (1, 2)})
def test_invalid_val_type__set(self):
with self.assertRaises(TypeError):
self._dict_to_nvlist_to_dict({"key": set(1, 2)})
self._dict_to_nvlist_to_dict({b"key": set(1, 2)})
def test_invalid_array_val_type(self):
with self.assertRaises(TypeError):
self._dict_to_nvlist_to_dict({"key": [(1, 2), (3, 4)]})
self._dict_to_nvlist_to_dict({b"key": [(1, 2), (3, 4)]})
def test_invalid_array_of_arrays_val_type(self):
with self.assertRaises(TypeError):
self._dict_to_nvlist_to_dict({"key": [[1, 2], [3, 4]]})
self._dict_to_nvlist_to_dict({b"key": [[1, 2], [3, 4]]})
def test_string_value(self):
props = {"key": "value"}
props = {b"key": b"value"}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_implicit_boolean_value(self):
props = {"key": None}
props = {b"key": None}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_boolean_values(self):
props = {"key1": True, "key2": False}
props = {b"key1": True, b"key2": False}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_explicit_boolean_true_value(self):
props = {"key": boolean_t(1)}
props = {b"key": boolean_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_boolean_false_value(self):
props = {"key": boolean_t(0)}
props = {b"key": boolean_t(0)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_boolean_invalid_value(self):
with self.assertRaises(OverflowError):
props = {"key": boolean_t(2)}
props = {b"key": boolean_t(2)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_boolean_another_invalid_value(self):
with self.assertRaises(OverflowError):
props = {"key": boolean_t(-1)}
props = {b"key": boolean_t(-1)}
self._dict_to_nvlist_to_dict(props)
def test_uint64_value(self):
props = {"key": 1}
props = {b"key": 1}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_uint64_max_value(self):
props = {"key": 2 ** 64 - 1}
props = {b"key": 2 ** 64 - 1}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_uint64_too_large_value(self):
props = {"key": 2 ** 64}
props = {b"key": 2 ** 64}
with self.assertRaises(OverflowError):
self._dict_to_nvlist_to_dict(props)
def test_uint64_negative_value(self):
props = {"key": -1}
props = {b"key": -1}
with self.assertRaises(OverflowError):
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint64_value(self):
props = {"key": uint64_t(1)}
props = {b"key": uint64_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint64_max_value(self):
props = {"key": uint64_t(2 ** 64 - 1)}
props = {b"key": uint64_t(2 ** 64 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint64_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint64_t(2 ** 64)}
props = {b"key": uint64_t(2 ** 64)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint64_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint64_t(-1)}
props = {b"key": uint64_t(-1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint32_value(self):
props = {"key": uint32_t(1)}
props = {b"key": uint32_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint32_max_value(self):
props = {"key": uint32_t(2 ** 32 - 1)}
props = {b"key": uint32_t(2 ** 32 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint32_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint32_t(2 ** 32)}
props = {b"key": uint32_t(2 ** 32)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint32_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint32_t(-1)}
props = {b"key": uint32_t(-1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint16_value(self):
props = {"key": uint16_t(1)}
props = {b"key": uint16_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint16_max_value(self):
props = {"key": uint16_t(2 ** 16 - 1)}
props = {b"key": uint16_t(2 ** 16 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint16_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint16_t(2 ** 16)}
props = {b"key": uint16_t(2 ** 16)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint16_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint16_t(-1)}
props = {b"key": uint16_t(-1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint8_value(self):
props = {"key": uint8_t(1)}
props = {b"key": uint8_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint8_max_value(self):
props = {"key": uint8_t(2 ** 8 - 1)}
props = {b"key": uint8_t(2 ** 8 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_uint8_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint8_t(2 ** 8)}
props = {b"key": uint8_t(2 ** 8)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_uint8_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": uint8_t(-1)}
props = {b"key": uint8_t(-1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_byte_value(self):
props = {"key": uchar_t(1)}
props = {b"key": uchar_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_byte_max_value(self):
props = {"key": uchar_t(2 ** 8 - 1)}
props = {b"key": uchar_t(2 ** 8 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_byte_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": uchar_t(2 ** 8)}
props = {b"key": uchar_t(2 ** 8)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_byte_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": uchar_t(-1)}
props = {b"key": uchar_t(-1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int64_value(self):
props = {"key": int64_t(1)}
props = {b"key": int64_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int64_max_value(self):
props = {"key": int64_t(2 ** 63 - 1)}
props = {b"key": int64_t(2 ** 63 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int64_min_value(self):
props = {"key": int64_t(-(2 ** 63))}
props = {b"key": int64_t(-(2 ** 63))}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int64_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": int64_t(2 ** 63)}
props = {b"key": int64_t(2 ** 63)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int64_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": int64_t(-(2 ** 63) - 1)}
props = {b"key": int64_t(-(2 ** 63) - 1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int32_value(self):
props = {"key": int32_t(1)}
props = {b"key": int32_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int32_max_value(self):
props = {"key": int32_t(2 ** 31 - 1)}
props = {b"key": int32_t(2 ** 31 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int32_min_value(self):
props = {"key": int32_t(-(2 ** 31))}
props = {b"key": int32_t(-(2 ** 31))}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int32_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": int32_t(2 ** 31)}
props = {b"key": int32_t(2 ** 31)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int32_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": int32_t(-(2 ** 31) - 1)}
props = {b"key": int32_t(-(2 ** 31) - 1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int16_value(self):
props = {"key": int16_t(1)}
props = {b"key": int16_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int16_max_value(self):
props = {"key": int16_t(2 ** 15 - 1)}
props = {b"key": int16_t(2 ** 15 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int16_min_value(self):
props = {"key": int16_t(-(2 ** 15))}
props = {b"key": int16_t(-(2 ** 15))}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int16_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": int16_t(2 ** 15)}
props = {b"key": int16_t(2 ** 15)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int16_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": int16_t(-(2 ** 15) - 1)}
props = {b"key": int16_t(-(2 ** 15) - 1)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int8_value(self):
props = {"key": int8_t(1)}
props = {b"key": int8_t(1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int8_max_value(self):
props = {"key": int8_t(2 ** 7 - 1)}
props = {b"key": int8_t(2 ** 7 - 1)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int8_min_value(self):
props = {"key": int8_t(-(2 ** 7))}
props = {b"key": int8_t(-(2 ** 7))}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_explicit_int8_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": int8_t(2 ** 7)}
props = {b"key": int8_t(2 ** 7)}
self._dict_to_nvlist_to_dict(props)
def test_explicit_int8_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": int8_t(-(2 ** 7) - 1)}
props = {b"key": int8_t(-(2 ** 7) - 1)}
self._dict_to_nvlist_to_dict(props)
def test_nested_dict(self):
props = {"key": {}}
props = {b"key": {}}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_nested_nested_dict(self):
props = {"key": {"key": {}}}
props = {b"key": {b"key": {}}}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_mismatching_values_array(self):
props = {"key": [1, "string"]}
props = {b"key": [1, b"string"]}
with self.assertRaises(TypeError):
self._dict_to_nvlist_to_dict(props)
def test_mismatching_values_array2(self):
props = {"key": [True, 10]}
props = {b"key": [True, 10]}
with self.assertRaises(TypeError):
self._dict_to_nvlist_to_dict(props)
def test_mismatching_values_array3(self):
props = {"key": [1, False]}
props = {b"key": [1, False]}
with self.assertRaises(TypeError):
self._dict_to_nvlist_to_dict(props)
def test_string_array(self):
props = {"key": ["value", "value2"]}
props = {b"key": [b"value", b"value2"]}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_boolean_array(self):
props = {"key": [True, False]}
props = {b"key": [True, False]}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_explicit_boolean_array(self):
props = {"key": [boolean_t(False), boolean_t(True)]}
props = {b"key": [boolean_t(False), boolean_t(True)]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_uint64_array(self):
props = {"key": [0, 1, 2 ** 64 - 1]}
props = {b"key": [0, 1, 2 ** 64 - 1]}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_uint64_array_too_large_value(self):
props = {"key": [0, 2 ** 64]}
props = {b"key": [0, 2 ** 64]}
with self.assertRaises(OverflowError):
self._dict_to_nvlist_to_dict(props)
def test_uint64_array_negative_value(self):
props = {"key": [0, -1]}
props = {b"key": [0, -1]}
with self.assertRaises(OverflowError):
self._dict_to_nvlist_to_dict(props)
def test_mixed_explict_int_array(self):
with self.assertRaises(TypeError):
props = {"key": [uint64_t(0), uint32_t(0)]}
props = {b"key": [uint64_t(0), uint32_t(0)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint64_array(self):
props = {"key": [uint64_t(0), uint64_t(1), uint64_t(2 ** 64 - 1)]}
props = {b"key": [uint64_t(0), uint64_t(1), uint64_t(2 ** 64 - 1)]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_uint64_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint64_t(0), uint64_t(2 ** 64)]}
props = {b"key": [uint64_t(0), uint64_t(2 ** 64)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint64_array_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint64_t(0), uint64_t(-1)]}
props = {b"key": [uint64_t(0), uint64_t(-1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint32_array(self):
props = {"key": [uint32_t(0), uint32_t(1), uint32_t(2 ** 32 - 1)]}
props = {b"key": [uint32_t(0), uint32_t(1), uint32_t(2 ** 32 - 1)]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_uint32_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint32_t(0), uint32_t(2 ** 32)]}
props = {b"key": [uint32_t(0), uint32_t(2 ** 32)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint32_array_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint32_t(0), uint32_t(-1)]}
props = {b"key": [uint32_t(0), uint32_t(-1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint16_array(self):
props = {"key": [uint16_t(0), uint16_t(1), uint16_t(2 ** 16 - 1)]}
props = {b"key": [uint16_t(0), uint16_t(1), uint16_t(2 ** 16 - 1)]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_uint16_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint16_t(0), uint16_t(2 ** 16)]}
props = {b"key": [uint16_t(0), uint16_t(2 ** 16)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint16_array_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint16_t(0), uint16_t(-1)]}
props = {b"key": [uint16_t(0), uint16_t(-1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint8_array(self):
props = {"key": [uint8_t(0), uint8_t(1), uint8_t(2 ** 8 - 1)]}
props = {b"key": [uint8_t(0), uint8_t(1), uint8_t(2 ** 8 - 1)]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_uint8_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint8_t(0), uint8_t(2 ** 8)]}
props = {b"key": [uint8_t(0), uint8_t(2 ** 8)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_uint8_array_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uint8_t(0), uint8_t(-1)]}
props = {b"key": [uint8_t(0), uint8_t(-1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_byte_array(self):
props = {"key": [uchar_t(0), uchar_t(1), uchar_t(2 ** 8 - 1)]}
props = {b"key": [uchar_t(0), uchar_t(1), uchar_t(2 ** 8 - 1)]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_byte_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uchar_t(0), uchar_t(2 ** 8)]}
props = {b"key": [uchar_t(0), uchar_t(2 ** 8)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_byte_array_negative_value(self):
with self.assertRaises(OverflowError):
props = {"key": [uchar_t(0), uchar_t(-1)]}
props = {b"key": [uchar_t(0), uchar_t(-1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int64_array(self):
props = {"key": [
props = {b"key": [
int64_t(0), int64_t(1), int64_t(2 ** 63 - 1), int64_t(-(2 ** 63))]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_int64_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int64_t(0), int64_t(2 ** 63)]}
props = {b"key": [int64_t(0), int64_t(2 ** 63)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int64_array_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int64_t(0), int64_t(-(2 ** 63) - 1)]}
props = {b"key": [int64_t(0), int64_t(-(2 ** 63) - 1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int32_array(self):
props = {"key": [
props = {b"key": [
int32_t(0), int32_t(1), int32_t(2 ** 31 - 1), int32_t(-(2 ** 31))]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_int32_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int32_t(0), int32_t(2 ** 31)]}
props = {b"key": [int32_t(0), int32_t(2 ** 31)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int32_array_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int32_t(0), int32_t(-(2 ** 31) - 1)]}
props = {b"key": [int32_t(0), int32_t(-(2 ** 31) - 1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int16_array(self):
props = {"key": [
props = {b"key": [
int16_t(0), int16_t(1), int16_t(2 ** 15 - 1), int16_t(-(2 ** 15))]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_int16_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int16_t(0), int16_t(2 ** 15)]}
props = {b"key": [int16_t(0), int16_t(2 ** 15)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int16_array_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int16_t(0), int16_t(-(2 ** 15) - 1)]}
props = {b"key": [int16_t(0), int16_t(-(2 ** 15) - 1)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int8_array(self):
props = {"key": [
props = {b"key": [
int8_t(0), int8_t(1), int8_t(2 ** 7 - 1), int8_t(-(2 ** 7))]}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntArrayDictsEqual(props, res)
def test_explict_int8_array_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int8_t(0), int8_t(2 ** 7)]}
props = {b"key": [int8_t(0), int8_t(2 ** 7)]}
self._dict_to_nvlist_to_dict(props)
def test_explict_int8_array_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"key": [int8_t(0), int8_t(-(2 ** 7) - 1)]}
props = {b"key": [int8_t(0), int8_t(-(2 ** 7) - 1)]}
self._dict_to_nvlist_to_dict(props)
def test_dict_array(self):
props = {"key": [{"key": 1}, {"key": None}, {"key": {}}]}
props = {b"key": [{b"key": 1}, {b"key": None}, {b"key": {}}]}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)
def test_implicit_uint32_value(self):
props = {"rewind-request": 1}
props = {b"rewind-request": 1}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_implicit_uint32_max_value(self):
props = {"rewind-request": 2 ** 32 - 1}
props = {b"rewind-request": 2 ** 32 - 1}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_implicit_uint32_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"rewind-request": 2 ** 32}
props = {b"rewind-request": 2 ** 32}
self._dict_to_nvlist_to_dict(props)
def test_implicit_uint32_negative_value(self):
with self.assertRaises(OverflowError):
props = {"rewind-request": -1}
props = {b"rewind-request": -1}
self._dict_to_nvlist_to_dict(props)
def test_implicit_int32_value(self):
props = {"pool_context": 1}
props = {b"pool_context": 1}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_implicit_int32_max_value(self):
props = {"pool_context": 2 ** 31 - 1}
props = {b"pool_context": 2 ** 31 - 1}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_implicit_int32_min_value(self):
props = {"pool_context": -(2 ** 31)}
props = {b"pool_context": -(2 ** 31)}
res = self._dict_to_nvlist_to_dict(props)
self._assertIntDictsEqual(props, res)
def test_implicit_int32_too_large_value(self):
with self.assertRaises(OverflowError):
props = {"pool_context": 2 ** 31}
props = {b"pool_context": 2 ** 31}
self._dict_to_nvlist_to_dict(props)
def test_implicit_int32_too_small_value(self):
with self.assertRaises(OverflowError):
props = {"pool_context": -(2 ** 31) - 1}
props = {b"pool_context": -(2 ** 31) - 1}
self._dict_to_nvlist_to_dict(props)
def test_complex_dict(self):
props = {
"key1": "str",
"key2": 10,
"key3": {
"skey1": True,
"skey2": None,
"skey3": [
b"key1": b"str",
b"key2": 10,
b"key3": {
b"skey1": True,
b"skey2": None,
b"skey3": [
True,
False,
True
]
},
"key4": [
"ab",
"bc"
b"key4": [
b"ab",
b"bc"
],
"key5": [
b"key5": [
2 ** 64 - 1,
1,
2,
3
],
"key6": [
b"key6": [
{
"skey71": "a",
"skey72": "b",
b"skey71": b"a",
b"skey72": b"b",
},
{
"skey71": "c",
"skey72": "d",
b"skey71": b"c",
b"skey72": b"d",
},
{
"skey71": "e",
"skey72": "f",
b"skey71": b"e",
b"skey72": b"f",
}
],
"type": 2 ** 32 - 1,
"pool_context": -(2 ** 31)
b"type": 2 ** 32 - 1,
b"pool_context": -(2 ** 31)
}
res = self._dict_to_nvlist_to_dict(props)
self.assertEqual(props, res)

View File

@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import, division, print_function
from setuptools import setup, find_packages
@ -28,8 +29,13 @@ setup(
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 2 :: Only",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: System :: Filesystems",
"Topic :: Software Development :: Libraries",
],
@ -47,7 +53,7 @@ setup(
setup_requires=[
"cffi",
],
python_requires='>=2.7,<3',
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,<4',
zip_safe=False,
test_suite="libzfs_core.test",
)

View File

@ -53,16 +53,6 @@
%bcond_with asan
%bcond_with systemd
# Python permits the !/usr/bin/python shebang for scripts that are cross
# compatible between python2 and python3, but Fedora 28 does not. Fedora
# wants us to choose python3 for cross-compatible scripts. Since we want
# to support python2 and python3 users, exclude our scripts from Fedora 28's
# RPM build check, so that we don't get a bunch of build warnings.
#
# Details: https://github.com/zfsonlinux/zfs/issues/7360
#
%global __brp_mangle_shebangs_exclude_from arc_summary.py|arcstat.py|dbufstat.py|test-runner.py|zts-report.py
# Generic enable switch for systemd
%if %{with systemd}
%define _systemd 1
@ -85,6 +75,32 @@
%define _systemd 1
%endif
# When not specified default to distribution provided version. This
# is normally Python 3, but for RHEL <= 7 only Python 2 is provided.
%if %{undefined __use_python}
%if 0%{?rhel} && 0%{?rhel} <= 7
%define __python /usr/bin/python2
%define __python_pkg_version 2
%define __python_cffi_pkg python-cffi
%else
%define __python /usr/bin/python3
%define __python_pkg_version 3
%define __python_cffi_pkg python3-cffi
%endif
%else
%define __python %{__use_python}
%define __python_pkg_version %{__use_python_pkg_version}
%define __python_cffi_pkg python%{__python_pkg_version}-cffi
%endif
# By default python-pyzfs is enabled, with the exception of
# RHEL 6 which by default uses Python 2.6 which is too old.
%if 0%{?rhel} == 6
%bcond_with pyzfs
%else
%bcond_without pyzfs
%endif
Name: @PACKAGE@
Version: @VERSION@
Release: @RELEASE@%{?dist}
@ -135,7 +151,7 @@ Requires: util-linux
Requires: sysstat
%description
This package contains the ZFS command line utilities.
This package contains the core ZFS command line utilities.
%package -n libzpool2
Summary: Native ZFS pool library for Linux
@ -219,6 +235,7 @@ Requires: acl
Requires: sudo
Requires: sysstat
Requires: libaio
Requires: python%{__python_pkg_version}
%if 0%{?rhel}%{?fedora}%{?suse_version}
BuildRequires: libaio-devel
%endif
@ -240,23 +257,23 @@ Requires: grep
This package contains a dracut module used to construct an initramfs
image which is ZFS aware.
%if 0%{?_pyzfs}
%package -n pyzfs
Summary: Python wrapper for libzfs_core
%if %{with pyzfs}
%package -n python%{__python_pkg_version}-pyzfs
Summary: Python %{python_version} wrapper for libzfs_core
Group: Development/Languages/Python
License: Apache-2.0
BuildArch: noarch
Requires: libzfs2 = %{version}
Requires: libnvpair1 = %{version}
Requires: libffi
Requires: python >= 2.7
Requires: python-cffi
Requires: python%{__python_pkg_version}
Requires: %{__python_cffi_pkg}
%if 0%{?rhel}%{?fedora}%{?suse_version}
BuildRequires: python-devel
BuildRequires: python%{__python_pkg_version}-devel
BuildRequires: libffi-devel
%endif
%description -n pyzfs
%description -n python%{__python_pkg_version}-pyzfs
This package provides a python wrapper for the libzfs_core C library.
%endif
@ -299,6 +316,12 @@ image which is ZFS aware.
%define systemd --enable-sysvinit --disable-systemd
%endif
%if %{with pyzfs}
%define pyzfs --enable-pyzfs
%else
%define pyzfs --disable-pyzfs
%endif
%setup -q
%build
@ -307,11 +330,13 @@ image which is ZFS aware.
--with-udevdir=%{_udevdir} \
--with-udevruledir=%{_udevruledir} \
--with-dracutdir=%{_dracutdir} \
--with-python=%{__python} \
--disable-static \
%{debug} \
%{debuginfo} \
%{asan} \
%{systemd}
%{systemd}\
%{pyzfs}
make %{?_smp_mflags}
%install
@ -379,12 +404,20 @@ systemctl --system daemon-reload >/dev/null || true
%endif
%files
# Core utilities
%{_sbindir}/*
%{_bindir}/*
%{_libexecdir}/%{name}
%{_bindir}/raidz_test
%{_bindir}/zgenhostid
# Optional Python 2/3 scripts
%{_bindir}/arc_summary
%{_bindir}/arcstat
%{_bindir}/dbufstat
# Man pages
%{_mandir}/man1/*
%{_mandir}/man5/*
%{_mandir}/man8/*
# Configuration files and scripts
%{_libexecdir}/%{name}
%{_udevdir}/vdev_id
%{_udevdir}/zvol_id
%{_udevdir}/rules.d/*
@ -426,8 +459,8 @@ systemctl --system daemon-reload >/dev/null || true
%doc contrib/dracut/README.dracut.markdown
%{_dracutdir}/modules.d/*
%if 0%{?_pyzfs}
%files -n pyzfs
%if %{with pyzfs}
%files -n python%{__python_pkg_version}-pyzfs
%doc contrib/pyzfs/README
%doc contrib/pyzfs/LICENSE
%defattr(-,root,root,-)

View File

@ -247,6 +247,10 @@ constrain_path() {
ln -fs "$STF_PATH/gunzip" "$STF_PATH/uncompress"
ln -fs "$STF_PATH/exportfs" "$STF_PATH/share"
ln -fs "$STF_PATH/exportfs" "$STF_PATH/unshare"
if [ -L "$STF_PATH/arc_summary3" ]; then
ln -fs "$STF_PATH/arc_summary3" "$STF_PATH/arc_summary"
fi
}
#

View File

@ -477,8 +477,7 @@ tests = ['zdb_001_neg', 'zfs_001_neg', 'zfs_allow_001_neg',
'zpool_offline_001_neg', 'zpool_online_001_neg', 'zpool_remove_001_neg',
'zpool_replace_001_neg', 'zpool_scrub_001_neg', 'zpool_set_001_neg',
'zpool_status_001_neg', 'zpool_upgrade_001_neg', 'arcstat_001_pos',
'arc_summary_001_pos', 'arc_summary_002_neg',
'arc_summary3_001_pos', 'dbufstat_001_pos']
'arc_summary_001_pos', 'arc_summary_002_neg', 'dbufstat_001_pos']
user =
tags = ['functional', 'cli_user', 'misc']

View File

@ -2,3 +2,14 @@ pkgdatadir = $(datadir)/@PACKAGE@/test-runner/bin
dist_pkgdata_SCRIPTS = \
test-runner.py \
zts-report.py
#
# These scripts are compatible with both Python 2.6 and 3.4. As such the
# python 3 shebang can be replaced at install time when targeting a python
# 2 system. This allows us to maintain a single version of the source.
#
if USING_PYTHON_2
install-data-hook:
sed --in-place 's|^#!/usr/bin/python3|#!/usr/bin/python2|' \
$(DESTDIR)$(pkgdatadir)/test-runner.py \
$(DESTDIR)$(pkgdatadir)/zts-report.py
endif

View File

@ -12,9 +12,11 @@
#
#
# Copyright (c) 2012, 2015 by Delphix. All rights reserved.
# Copyright (c) 2012, 2018 by Delphix. All rights reserved.
# Copyright (c) 2017 Datto Inc.
#
# This script must remain compatible with Python 2.6+ and Python 3.4+.
#
# some python 2.7 systems don't have a configparser shim
try:
@ -23,7 +25,8 @@ except ImportError:
import ConfigParser as configparser
import os
import logging
import sys
from datetime import datetime
from optparse import OptionParser
from pwd import getpwnam
@ -31,8 +34,6 @@ from pwd import getpwuid
from select import select
from subprocess import PIPE
from subprocess import Popen
from sys import argv
from sys import maxsize
from threading import Timer
from time import time
@ -41,6 +42,10 @@ TESTDIR = '/usr/share/zfs/'
KILL = 'kill'
TRUE = 'true'
SUDO = 'sudo'
LOG_FILE = 'LOG_FILE'
LOG_OUT = 'LOG_OUT'
LOG_ERR = 'LOG_ERR'
LOG_FILE_OBJ = None
class Result(object):
@ -84,7 +89,7 @@ class Output(object):
"""
def __init__(self, stream):
self.stream = stream
self._buf = ''
self._buf = b''
self.lines = []
def fileno(self):
@ -109,15 +114,15 @@ class Output(object):
buf = os.read(fd, 4096)
if not buf:
return None
if '\n' not in buf:
if b'\n' not in buf:
self._buf += buf
return []
buf = self._buf + buf
tmp, rest = buf.rsplit('\n', 1)
tmp, rest = buf.rsplit(b'\n', 1)
self._buf = rest
now = datetime.now()
rows = tmp.split('\n')
rows = tmp.split(b'\n')
self.lines += [(now, r) for r in rows]
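
Output.read() above now buffers raw bytes from os.read() and only emits
complete lines, holding the trailing partial line in _buf until the next read;
a self-contained sketch of that split logic (simplified from the class above):

buf = b''                                  # carry-over from a previous read (self._buf)
chunk = b'line one\nline two\npartial'     # as returned by os.read(fd, 4096)

data = buf + chunk
tmp, rest = data.rsplit(b'\n', 1)          # 'rest' is the unfinished trailing line
lines = tmp.split(b'\n')                   # complete lines, kept as bytes

print(lines)   # [b'line one', b'line two']
print(rest)    # b'partial'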
@ -225,7 +230,7 @@ class Cmd(object):
proc = Popen(privcmd, stdout=PIPE, stderr=PIPE)
# Allow a special timeout value of 0 to mean infinity
if int(self.timeout) == 0:
self.timeout = maxsize
self.timeout = sys.maxsize
t = Timer(int(self.timeout), self.kill_cmd, [proc])
try:
@ -252,50 +257,52 @@ class Cmd(object):
self.result.runtime = '%02d:%02d' % (m, s)
self.result.result = 'SKIP'
def log(self, logger, options):
def log(self, options):
"""
This function is responsible for writing all output. This includes
the console output, the logfile of all results (with timestamped
merged stdout and stderr), and for each test, the unmodified
stdout/stderr/merged in its own file.
"""
if logger is None:
return
logname = getpwuid(os.getuid()).pw_name
user = ' (run as %s)' % (self.user if len(self.user) else logname)
msga = 'Test: %s%s ' % (self.pathname, user)
msgb = '[%s] [%s]' % (self.result.runtime, self.result.result)
msgb = '[%s] [%s]\n' % (self.result.runtime, self.result.result)
pad = ' ' * (80 - (len(msga) + len(msgb)))
result_line = msga + pad + msgb
# If -q is specified, only print a line for tests that didn't pass.
# This means passing tests need to be logged as DEBUG, or the one
# line summary will only be printed in the logfile for failures.
# The result line is always written to the log file. If -q was
# specified only failures are written to the console, otherwise
# the result line is written to the console.
write_log(bytearray(result_line, encoding='utf-8'), LOG_FILE)
if not options.quiet:
logger.info('%s%s%s' % (msga, pad, msgb))
elif self.result.result is not 'PASS':
logger.info('%s%s%s' % (msga, pad, msgb))
else:
logger.debug('%s%s%s' % (msga, pad, msgb))
write_log(result_line, LOG_OUT)
elif options.quiet and self.result.result is not 'PASS':
write_log(result_line, LOG_OUT)
lines = sorted(self.result.stdout + self.result.stderr,
key=lambda x: x[0])
# Write timestamped output (stdout and stderr) to the logfile
for dt, line in lines:
logger.debug('%s %s' % (dt.strftime("%H:%M:%S.%f ")[:11], line))
timestamp = bytearray(dt.strftime("%H:%M:%S.%f ")[:11],
encoding='utf-8')
write_log(b'%s %s\n' % (timestamp, line), LOG_FILE)
# Write the separate stdout/stderr/merged files, if the data exists
if len(self.result.stdout):
with open(os.path.join(self.outputdir, 'stdout'), 'w') as out:
with open(os.path.join(self.outputdir, 'stdout'), 'wb') as out:
for _, line in self.result.stdout:
os.write(out.fileno(), '%s\n' % line)
os.write(out.fileno(), b'%s\n' % line)
if len(self.result.stderr):
with open(os.path.join(self.outputdir, 'stderr'), 'w') as err:
with open(os.path.join(self.outputdir, 'stderr'), 'wb') as err:
for _, line in self.result.stderr:
os.write(err.fileno(), '%s\n' % line)
os.write(err.fileno(), b'%s\n' % line)
if len(self.result.stdout) and len(self.result.stderr):
with open(os.path.join(self.outputdir, 'merged'), 'w') as merged:
with open(os.path.join(self.outputdir, 'merged'), 'wb') as merged:
for _, line in lines:
os.write(merged.fileno(), '%s\n' % line)
os.write(merged.fileno(), b'%s\n' % line)
class Test(Cmd):
@ -323,7 +330,7 @@ class Test(Cmd):
(self.pathname, self.outputdir, self.timeout, self.pre,
pre_user, self.post, post_user, self.user, self.tags)
def verify(self, logger):
def verify(self):
"""
Check the pre/post scripts, user and Test. Omit the Test from this
run if there are any problems.
@ -333,19 +340,19 @@ class Test(Cmd):
for f in [f for f in files if len(f)]:
if not verify_file(f):
logger.info("Warning: Test '%s' not added to this run because"
" it failed verification." % f)
write_log("Warning: Test '%s' not added to this run because"
" it failed verification.\n" % f, LOG_ERR)
return False
for user in [user for user in users if len(user)]:
if not verify_user(user, logger):
logger.info("Not adding Test '%s' to this run." %
self.pathname)
if not verify_user(user):
write_log("Not adding Test '%s' to this run.\n" %
self.pathname, LOG_ERR)
return False
return True
def run(self, logger, options):
def run(self, options):
"""
Create Cmd instances for the pre/post scripts. If the pre script
doesn't pass, skip this Test. Run the post script regardless.
@ -363,18 +370,18 @@ class Test(Cmd):
if len(pretest.pathname):
pretest.run(options)
cont = pretest.result.result is 'PASS'
pretest.log(logger, options)
pretest.log(options)
if cont:
test.run(options)
else:
test.skip()
test.log(logger, options)
test.log(options)
if len(posttest.pathname):
posttest.run(options)
posttest.log(logger, options)
posttest.log(options)
class TestGroup(Test):
@ -398,7 +405,7 @@ class TestGroup(Test):
(self.pathname, self.outputdir, self.tests, self.timeout,
self.pre, pre_user, self.post, post_user, self.user, self.tags)
def verify(self, logger):
def verify(self):
"""
Check the pre/post scripts, user and tests in this TestGroup. Omit
the TestGroup entirely, or simply delete the relevant tests in the
@ -416,34 +423,34 @@ class TestGroup(Test):
for f in [f for f in auxfiles if len(f)]:
if self.pathname != os.path.dirname(f):
logger.info("Warning: TestGroup '%s' not added to this run. "
"Auxiliary script '%s' exists in a different "
"directory." % (self.pathname, f))
write_log("Warning: TestGroup '%s' not added to this run. "
"Auxiliary script '%s' exists in a different "
"directory.\n" % (self.pathname, f), LOG_ERR)
return False
if not verify_file(f):
logger.info("Warning: TestGroup '%s' not added to this run. "
"Auxiliary script '%s' failed verification." %
(self.pathname, f))
write_log("Warning: TestGroup '%s' not added to this run. "
"Auxiliary script '%s' failed verification.\n" %
(self.pathname, f), LOG_ERR)
return False
for user in [user for user in users if len(user)]:
if not verify_user(user, logger):
logger.info("Not adding TestGroup '%s' to this run." %
self.pathname)
if not verify_user(user):
write_log("Not adding TestGroup '%s' to this run.\n" %
self.pathname, LOG_ERR)
return False
# If one of the tests is invalid, delete it, log it, and drive on.
for test in self.tests:
if not verify_file(os.path.join(self.pathname, test)):
del self.tests[self.tests.index(test)]
logger.info("Warning: Test '%s' removed from TestGroup '%s' "
"because it failed verification." %
(test, self.pathname))
write_log("Warning: Test '%s' removed from TestGroup '%s' "
"because it failed verification.\n" %
(test, self.pathname), LOG_ERR)
return len(self.tests) is not 0
def run(self, logger, options):
def run(self, options):
"""
Create Cmd instances for the pre/post scripts. If the pre script
doesn't pass, skip all the tests in this TestGroup. Run the post
@ -464,7 +471,7 @@ class TestGroup(Test):
if len(pretest.pathname):
pretest.run(options)
cont = pretest.result.result is 'PASS'
pretest.log(logger, options)
pretest.log(options)
for fname in self.tests:
test = Cmd(os.path.join(self.pathname, fname),
@ -475,11 +482,11 @@ class TestGroup(Test):
else:
test.skip()
test.log(logger, options)
test.log(options)
if len(posttest.pathname):
posttest.run(options)
posttest.log(logger, options)
posttest.log(options)
class TestRun(object):
@ -491,7 +498,7 @@ class TestRun(object):
self.starttime = time()
self.timestamp = datetime.now().strftime('%Y%m%dT%H%M%S')
self.outputdir = os.path.join(options.outputdir, self.timestamp)
self.logger = self.setup_logging(options)
self.setup_logging(options)
self.defaults = [
('outputdir', BASEDIR),
('quiet', False),
@ -524,7 +531,7 @@ class TestRun(object):
for prop in Test.props:
setattr(test, prop, getattr(options, prop))
if test.verify(self.logger):
if test.verify():
self.tests[pathname] = test
def addtestgroup(self, dirname, filenames, options):
@ -546,9 +553,9 @@ class TestRun(object):
self.testgroups[dirname] = testgroup
self.testgroups[dirname].tests = sorted(filenames)
testgroup.verify(self.logger)
testgroup.verify()
def read(self, logger, options):
def read(self, options):
"""
Read in the specified runfile, and apply the TestRun properties
listed in the 'DEFAULT' section to our TestRun. Then read each
@ -589,7 +596,7 @@ class TestRun(object):
# Repopulate tests using eval to convert the string to a list
testgroup.tests = eval(config.get(section, 'tests'))
if testgroup.verify(logger):
if testgroup.verify():
self.testgroups[section] = testgroup
else:
test = Test(section)
@ -598,7 +605,7 @@ class TestRun(object):
if config.has_option(sect, prop):
setattr(test, prop, config.get(sect, prop))
if test.verify(logger):
if test.verify():
self.tests[section] = test
def write(self, options):
@ -661,42 +668,23 @@ class TestRun(object):
def setup_logging(self, options):
"""
Two loggers are set up here. The first is for the logfile which
will contain one line summarizing the test, including the test
name, result, and running time. This logger will also capture the
timestamped combined stdout and stderr of each run. The second
logger is optional console output, which will contain only the one
line summary. The loggers are initialized at two different levels
to facilitate segregating the output.
This function creates the output directory and gets a file object
for the logfile. This function must be called before write_log()
can be used.
"""
if options.dryrun is True:
return
testlogger = logging.getLogger(__name__)
testlogger.setLevel(logging.DEBUG)
global LOG_FILE_OBJ
if options.cmd is not 'wrconfig':
try:
old = os.umask(0)
os.makedirs(self.outputdir, mode=0o777)
os.umask(old)
filename = os.path.join(self.outputdir, 'log')
LOG_FILE_OBJ = open(filename, buffering=0, mode='wb')
except OSError as e:
fail('%s' % e)
filename = os.path.join(self.outputdir, 'log')
logfile = logging.FileHandler(filename)
logfile.setLevel(logging.DEBUG)
logfilefmt = logging.Formatter('%(message)s')
logfile.setFormatter(logfilefmt)
testlogger.addHandler(logfile)
cons = logging.StreamHandler()
cons.setLevel(logging.INFO)
consfmt = logging.Formatter('%(message)s')
cons.setFormatter(consfmt)
testlogger.addHandler(cons)
return testlogger
def run(self, options):
"""
@ -713,14 +701,14 @@ class TestRun(object):
if not os.path.exists(logsymlink):
os.symlink(self.outputdir, logsymlink)
else:
print('Could not make a symlink to directory %s' % (
self.outputdir))
write_log('Could not make a symlink to directory %s\n' %
self.outputdir, LOG_ERR)
iteration = 0
while iteration < options.iterations:
for test in sorted(self.tests.keys()):
self.tests[test].run(self.logger, options)
self.tests[test].run(options)
for testgroup in sorted(self.testgroups.keys()):
self.testgroups[testgroup].run(self.logger, options)
self.testgroups[testgroup].run(options)
iteration += 1
def summary(self):
@ -748,6 +736,23 @@ class TestRun(object):
return 0
def write_log(msg, target):
"""
Write the provided message to standard output, standard error, or
the logfile. If specifying LOG_FILE, then `msg` must be a bytes-like
object. This way we can still handle output from tests that
may be in unexpected encodings.
"""
if target == LOG_OUT:
os.write(sys.stdout.fileno(), bytearray(msg, encoding='utf-8'))
elif target == LOG_ERR:
os.write(sys.stderr.fileno(), bytearray(msg, encoding='utf-8'))
elif target == LOG_FILE:
os.write(LOG_FILE_OBJ.fileno(), msg)
else:
fail('write_log called with unknown target "%s"' % target)
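# A brief usage sketch (illustrative only): the console targets expect str,
# while LOG_FILE expects a bytes-like object, e.g.
#   write_log('Warning: something unexpected happened.\n', LOG_ERR)
#   write_log(bytearray(result_line, encoding='utf-8'), LOG_FILE)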
def verify_file(pathname):
"""
Verify that the supplied pathname is an executable regular file.
@ -763,7 +768,7 @@ def verify_file(pathname):
return False
def verify_user(user, logger):
def verify_user(user):
"""
Verify that the specified user exists on this system, and can execute
sudo without being prompted for a password.
@ -776,13 +781,15 @@ def verify_user(user, logger):
try:
getpwnam(user)
except KeyError:
logger.info("Warning: user '%s' does not exist.", user)
write_log("Warning: user '%s' does not exist.\n" % user,
LOG_ERR)
return False
p = Popen(testcmd)
p.wait()
if p.returncode is not 0:
logger.info("Warning: user '%s' cannot use passwordless sudo.", user)
write_log("Warning: user '%s' cannot use passwordless sudo.\n" % user,
LOG_ERR)
return False
else:
Cmd.verified_users.append(user)
@ -810,7 +817,7 @@ def find_tests(testrun, options):
def fail(retstr, ret=1):
print('%s: %s' % (argv[0], retstr))
print('%s: %s' % (sys.argv[0], retstr))
exit(ret)
@ -900,7 +907,7 @@ def main():
if options.cmd is 'runtests':
find_tests(testrun, options)
elif options.cmd is 'rdconfig':
testrun.read(testrun.logger, options)
testrun.read(options)
elif options.cmd is 'wrconfig':
find_tests(testrun, options)
testrun.write(options)

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3
#
# This file and its contents are supplied under the terms of the
@ -15,6 +15,8 @@
# Copyright (c) 2017 by Delphix. All rights reserved.
# Copyright (c) 2018 by Lawrence Livermore National Security, LLC.
#
# This script must remain compatible with Python 2.6+ and Python 3.4+.
#
import os
import re

View File

@ -146,10 +146,10 @@ export ZFS_FILES='zdb
zpool
ztest
raidz_test
arc_summary.py
arc_summary3.py
arcstat.py
dbufstat.py
arc_summary
arc_summary3
arcstat
dbufstat
zed
zgenhostid
zstreamdump'

View File

@ -37,7 +37,7 @@
# 2. Store output from dbufs kstat
# 3. Store output from dbufstats kstat
# 4. Compare stats presented in dbufstats with stat generated using
# dbufstat.py and the dbufs kstat output
# dbufstat and the dbufs kstat output
#
DBUFSTATS_FILE=$(mktemp $TEST_BASE_DIR/dbufstats.out.XXXXXX)
@ -56,7 +56,7 @@ function testdbufstat # stat_name dbufstat_filter
[[ -n "$2" ]] && filter="-F $2"
from_dbufstat=$(grep -w "$name" "$DBUFSTATS_FILE" | awk '{ print $3 }')
from_dbufs=$(dbufstat.py -bxn -i "$DBUFS_FILE" "$filter" | wc -l)
from_dbufs=$(dbufstat -bxn -i "$DBUFS_FILE" "$filter" | wc -l)
within_tolerance $from_dbufstat $from_dbufs 9 \
|| log_fail "Stat $name exceeded tolerance"

View File

@ -62,18 +62,18 @@ objid=$(stat --format="%i" "$TESTDIR/file")
log_note "Object ID for $TESTDIR/file is $objid"
log_must eval "cat /proc/spl/kstat/zfs/dbufs > $DBUFS_FILE"
dbuf=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
mru=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
mfu=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
dbuf=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
mru=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
mfu=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
log_note "dbuf count is $dbuf, mru count is $mru, mfu count is $mfu"
verify_ne "0" "$mru" "mru count"
verify_eq "0" "$mfu" "mfu count"
log_must eval "cat $TESTDIR/file > /dev/null"
log_must eval "cat /proc/spl/kstat/zfs/dbufs > $DBUFS_FILE"
dbuf=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
mru=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
mfu=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
dbuf=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
mru=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
mfu=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
log_note "dbuf count is $dbuf, mru count is $mru, mfu count is $mfu"
verify_ne "0" "$mfu" "mfu count"

View File

@ -46,7 +46,6 @@ dist_pkgdata_SCRIPTS = \
arcstat_001_pos.ksh \
arc_summary_001_pos.ksh \
arc_summary_002_neg.ksh \
arc_summary3_001_pos.ksh \
dbufstat_001_pos.ksh
dist_pkgdata_DATA = \

View File

@ -1,56 +0,0 @@
#! /bin/ksh -p
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
# Copyright (c) 2015 by Lawrence Livermore National Security, LLC.
# All rights reserved.
#
. $STF_SUITE/include/libtest.shlib
# Keep the following test until Python 3 is installed on all test systems,
# then remove
python3 -V 2>&1 > /dev/null
if (( $? )); then
log_unsupported "Python3 is not installed"
fi
# Some systems have Python 3 installed, but only older versions that don't
# have the subprocess.run() functionality. We catch these with a separate
# test. Remove this when all systems have reached 3.5 or greater
VERSIONPYTEST=$(python3 -V)
if [[ ${VERSIONPYTEST:9:1} -lt 5 ]]; then
log_unsupported "Python3 must be version 3.5 or greater"
fi
set -A args "" "-a" "-d" "-p 1" "-g" "-s arc" "-r"
log_assert "arc_summary3.py generates output and doesn't return an error code"
typeset -i i=0
while [[ $i -lt ${#args[*]} ]]; do
log_must eval "arc_summary3.py ${args[i]} > /dev/null"
((i = i + 1))
done
log_pass "arc_summary3.py generates output and doesn't return an error code"

View File

@ -27,17 +27,34 @@
. $STF_SUITE/include/libtest.shlib
set -A args "" "-a" "-d" "-p 1"
log_assert "arc_summary generates output and doesn't return an error code"
log_assert "arc_summary.py generates output and doesn't return an error code"
# Depending on which version of arc_summary is installed, some command
# line options may not be available. The python3 version includes
# several additional flags.
python3 -V 2>&1 > /dev/null
if (( $? )); then
# Some systems have Python 3 installed, but only older versions
# that don't have the subprocess.run() functionality. We catch
# these with a separate test. Remove this when all systems have
# reached 3.5 or greater
VERSIONPYTEST=$(python3 -V)
if [[ ${VERSIONPYTEST:9:1} -lt 5 ]]; then
set -A args "" "-a" "-d" "-p 1"
else
set -A args "" "-a" "-d" "-p 1" "-g" "-s arc" "-r"
fi
else
set -A args "" "-a" "-d" "-p 1"
fi
typeset -i i=0
while [[ $i -lt ${#args[*]} ]]; do
log_must eval "arc_summary.py ${args[i]} > /dev/null"
((i = i + 1))
log_must eval "arc_summary ${args[i]} > /dev/null"
((i = i + 1))
done
log_must eval "arc_summary.py | head > /dev/null"
log_must eval "arc_summary.py | head -1 > /dev/null"
log_must eval "arc_summary | head > /dev/null"
log_must eval "arc_summary | head -1 > /dev/null"
log_pass "arc_summary.py generates output and doesn't return an error code"
log_pass "arc_summary generates output and doesn't return an error code"

View File

@ -27,12 +27,12 @@
. $STF_SUITE/include/libtest.shlib
typeset args=("-x" "-r" "-5" "-p 7" "--err" "-@")
typeset args=("-x" "-5" "-p 7" "--err" "-@")
log_assert "arc_summary.py generates an error code with invalid options"
log_assert "arc_summary generates an error code with invalid options"
for arg in "${args[@]}"; do
log_mustnot eval "arc_summary.py $arg > /dev/null"
log_mustnot eval "arc_summary $arg > /dev/null"
done
log_pass "arc_summary.py generates an error code with invalid options"
log_pass "arc_summary generates an error code with invalid options"

View File

@ -30,12 +30,12 @@
set -A args "" "-s \",\"" "-x" "-v" \
"-f time,hit%,dh%,ph%,mh%"
log_assert "arcstat.py generates output and doesn't return an error code"
log_assert "arcstat generates output and doesn't return an error code"
typeset -i i=0
while [[ $i -lt ${#args[*]} ]]; do
log_must eval "arcstat.py ${args[i]} > /dev/null"
log_must eval "arcstat ${args[i]} > /dev/null"
((i = i + 1))
done
log_pass "arcstat.py generates output and doesn't return an error code"
log_pass "arcstat generates output and doesn't return an error code"

View File

@ -32,19 +32,20 @@
. $STF_SUITE/include/libtest.shlib
. $STF_SUITE/tests/functional/cli_user/misc/misc.cfg
if poolexists $TESTPOOL.virt
then
if poolexists $TESTPOOL.virt; then
log_must zpool destroy $TESTPOOL.virt
fi
if poolexists v1-pool
then
if poolexists v1-pool; then
log_must zpool destroy v1-pool
fi
if [[ -f $TEST_BASE_DIR/zfstest_datastream.dat ]]
then
log_must rm -f $TEST_BASE_DIR/zfstest_datastream.dat
fi
log_must rm -f $TEST_BASE_DIR/zfstest_datastream.dat
log_must rm -f $TEST_BASE_DIR/disk1.dat $TEST_BASE_DIR/disk2.dat \
$TEST_BASE_DIR/disk3.dat $TEST_BASE_DIR/disk-additional.dat \
$TEST_BASE_DIR/disk-export.dat $TEST_BASE_DIR/disk-offline.dat \
$TEST_BASE_DIR/disk-spare1.dat $TEST_BASE_DIR/disk-spare2.dat
log_must rm -f $TEST_BASE_DIR/zfs-pool-v1.dat \
$TEST_BASE_DIR/zfs-pool-v1.dat.bz2
default_cleanup

View File

@ -29,15 +29,15 @@
set -A args "" "-b" "-d" "-r" "-v" "-s \",\"" "-x" "-n"
log_assert "dbufstat.py generates output and doesn't return an error code"
log_assert "dbufstat generates output and doesn't return an error code"
typeset -i i=0
while [[ $i -lt ${#args[*]} ]]; do
log_must eval "dbufstat.py ${args[i]} > /dev/null"
log_must eval "dbufstat ${args[i]} > /dev/null"
((i = i + 1))
done
# A simple test of dbufstat.py filter functionality
log_must eval "dbufstat.py -F object=10,dbc=1,pool=$TESTPOOL > /dev/null"
# A simple test of dbufstat filter functionality
log_must eval "dbufstat -F object=10,dbc=1,pool=$TESTPOOL > /dev/null"
log_pass "dbufstat.py generates output and doesn't return an error code"
log_pass "dbufstat generates output and doesn't return an error code"

View File

@ -0,0 +1 @@
pyzfs_unittest.ksh

View File

@ -1,4 +1,18 @@
pkgdatadir = $(datadir)/@PACKAGE@/zfs-tests/tests/functional/pyzfs
dist_pkgdata_SCRIPTS = \
pkgpyzfsdir = $(datadir)/@PACKAGE@/zfs-tests/tests/functional/pyzfs
pkgpyzfs_SCRIPTS = \
pyzfs_unittest.ksh
EXTRA_DIST = \
pyzfs_unittest.ksh.in
#
# The pyzfs module is built for either Python 2 or Python 3. In order
# to test it properly, the unit tests must be updated to the matching version.
#
$(pkgpyzfs_SCRIPTS):%:%.in
-$(SED) -e 's,@PYTHON\@,$(PYTHON),g' \
$< >'$@'
-chmod 775 $@
distclean-local::
-$(RM) $(pkgpyzfs_SCRIPTS)

View File

@ -28,7 +28,7 @@
verify_runnable "global"
# Verify that the required dependencies for testing are installed.
python -c "import cffi" 2>/dev/null
@PYTHON@ -c "import cffi" 2>/dev/null
if [ $? -eq 1 ]; then
log_unsupported "python-cffi not found by Python"
fi
@ -37,7 +37,7 @@ fi
# only if pyzfs was not installed due to missing build-time dependencies; if
# we cannot load "libzfs_core" due to other reasons, for instance an API/ABI
# mismatch, we want to report it.
python -c '
@PYTHON@ -c '
import pkgutil, sys
sys.exit(pkgutil.find_loader("libzfs_core") is None)'
if [ $? -eq 1 ]; then
@ -47,7 +47,7 @@ fi
log_assert "Verify the nvlist and libzfs_core Python unittest run successfully"
# NOTE: don't use log_must() here because it makes output unreadable
python -m unittest --verbose \
@PYTHON@ -m unittest --verbose \
libzfs_core.test.test_nvlist.TestNVList \
libzfs_core.test.test_libzfs_core.ZFSTest
if [ $? -ne 0 ]; then