Python 2 and 3 compatibility
With Python 2 (slowly) approaching EOL and its removal from distributions already being planned (Fedora), the existing Python 2 code needs to be transitioned to Python 3. This patch stack updates the Python code to be compatible with Python 2.7, 3.4, 3.5, 3.6, and 3.7.

Reviewed-by: John Ramsden <johnramsden@riseup.net>
Reviewed-by: Neal Gompa <ngompa@datto.com>
Reviewed-by: loli10K <ezomori.nozomu@gmail.com>
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: John Wren Kennedy <john.kennedy@delphix.com>
Reviewed-by: Antonio Russo <antonio.e.russo@gmail.com>
Closes #8096
commit c87db59196
.gitignore
@@ -22,6 +22,8 @@
 *.swp
 *.gcno
 *.gcda
+*.pyc
+*.pyo
 .deps
 .libs
 .dirstamp
@@ -1 +1,13 @@
-dist_bin_SCRIPTS = arc_summary.py arc_summary3.py
+EXTRA_DIST = arc_summary2 arc_summary3
+
+if USING_PYTHON_2
+dist_bin_SCRIPTS = arc_summary2
+install-exec-hook:
+	mv $(DESTDIR)$(bindir)/arc_summary2 $(DESTDIR)$(bindir)/arc_summary
+endif
+
+if USING_PYTHON_3
+dist_bin_SCRIPTS = arc_summary3
+install-exec-hook:
+	mv $(DESTDIR)$(bindir)/arc_summary3 $(DESTDIR)$(bindir)/arc_summary
+endif
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2
 #
 # $Id: arc_summary.pl,v 388:e27800740aa2 2011-07-08 02:53:29Z jhell $
 #
@@ -35,6 +35,8 @@
 # Note some of this code uses older code (eg getopt instead of argparse,
 # subprocess.Popen() instead of subprocess.run()) because we need to support
 # some very old versions of Python.
+#
+
 """Print statistics on the ZFS Adjustable Replacement Cache (ARC)

 Provides basic information on the ARC, its efficiency, the L2ARC (if present),
@@ -1005,7 +1007,7 @@ def zfs_header():
 def usage():
     """Print usage information"""

-    sys.stdout.write("Usage: arc_summary.py [-h] [-a] [-d] [-p PAGE]\n\n")
+    sys.stdout.write("Usage: arc_summary [-h] [-a] [-d] [-p PAGE]\n\n")
     sys.stdout.write("\t -h, --help : "
                      "Print this help message and exit\n")
     sys.stdout.write("\t -a, --alternate : "
@@ -1018,10 +1020,10 @@ def usage():
                      "should be an integer between 1 and " +
                      str(len(unSub)) + "\n\n")
     sys.stdout.write("Examples:\n")
-    sys.stdout.write("\tarc_summary.py -a\n")
-    sys.stdout.write("\tarc_summary.py -p 4\n")
-    sys.stdout.write("\tarc_summary.py -ad\n")
-    sys.stdout.write("\tarc_summary.py --page=2\n")
+    sys.stdout.write("\tarc_summary -a\n")
+    sys.stdout.write("\tarc_summary -p 4\n")
+    sys.stdout.write("\tarc_summary -ad\n")
+    sys.stdout.write("\tarc_summary --page=2\n")


 def main():
@@ -346,7 +346,7 @@ def get_version(request):
         error_msg = '(ERROR: "{0}" requested)'.format(request)
         return error_msg

-    # The original arc_summary.py called /sbin/modinfo/{spl,zfs} to get
+    # The original arc_summary called /sbin/modinfo/{spl,zfs} to get
     # the version information. We switch to /sys/module/{spl,zfs}/version
     # to make sure we get what is really loaded in the kernel
     command = ["cat", "/sys/module/{0}/version".format(request)]
@@ -374,7 +374,7 @@ def print_header():
     """

     # datetime is now recommended over time but we keep the exact formatting
-    # from the older version of arc_summary.py in case there are scripts
+    # from the older version of arc_summary in case there are scripts
     # that expect it in this way
     daydate = time.strftime(DATE_FORMAT)
     spc_date = LINE_LENGTH-len(daydate)
@@ -586,7 +586,7 @@ def section_archits(kstats_dict):

     # For some reason, anon_hits can turn negative, which is weird. Until we
     # have figured out why this happens, we just hide the problem, following
-    # the behavior of the original arc_summary.py
+    # the behavior of the original arc_summary.
     if anon_hits >= 0:
         prt_i2('Anonymously used:',
                f_perc(anon_hits, arc_stats['hits']), f_hits(anon_hits))
@@ -1 +1,13 @@
-dist_bin_SCRIPTS = arcstat.py
+dist_bin_SCRIPTS = arcstat
+
+#
+# The arcstat script is compatibile with both Python 2.6 and 3.4.
+# As such the python 3 shebang can be replaced at install time when
+# targeting a python 2 system. This allows us to maintain a single
+# version of the source.
+#
+if USING_PYTHON_2
+install-exec-hook:
+	sed --in-place 's|^#!/usr/bin/python3|#!/usr/bin/python2|' \
+	    $(DESTDIR)$(bindir)/arcstat
+endif
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
 #
 # Print out ZFS ARC Statistics exported via kstat(1)
 # For a definition of fields, or usage, use arctstat.pl -v
@@ -42,7 +42,8 @@
 # @hdr is the array of fields that needs to be printed, so we
 # just iterate over this array and print the values using our pretty printer.
 #
+# This script must remain compatible with Python 2.6+ and Python 3.4+.
+#

 import sys
 import time
@@ -109,7 +110,7 @@ opfile = None
 sep = "  "               # Default separator is 2 spaces
 version = "0.4"
 l2exist = False
-cmd = ("Usage: arcstat.py [-hvx] [-f fields] [-o file] [-s string] [interval "
+cmd = ("Usage: arcstat [-hvx] [-f fields] [-o file] [-s string] [interval "
       "[count]]\n")
 cur = {}
 d = {}
@@ -138,10 +139,10 @@ def usage():
     sys.stderr.write("\t -s : Override default field separator with custom "
                      "character or string\n")
     sys.stderr.write("\nExamples:\n")
-    sys.stderr.write("\tarcstat.py -o /tmp/a.log 2 10\n")
-    sys.stderr.write("\tarcstat.py -s \",\" -o /tmp/a.log 2 10\n")
-    sys.stderr.write("\tarcstat.py -v\n")
-    sys.stderr.write("\tarcstat.py -f time,hit%,dh%,ph%,mh% 1\n")
+    sys.stderr.write("\tarcstat -o /tmp/a.log 2 10\n")
+    sys.stderr.write("\tarcstat -s \",\" -o /tmp/a.log 2 10\n")
+    sys.stderr.write("\tarcstat -v\n")
+    sys.stderr.write("\tarcstat -f time,hit%,dh%,ph%,mh% 1\n")
    sys.stderr.write("\n")

     sys.exit(1)
@@ -1 +1,13 @@
-dist_bin_SCRIPTS = dbufstat.py
+dist_bin_SCRIPTS = dbufstat
+
+#
+# The dbufstat script is compatibile with both Python 2.6 and 3.4.
+# As such the python 3 shebang can be replaced at install time when
+# targeting a python 2 system. This allows us to maintain a single
+# version of the source.
+#
+if USING_PYTHON_2
+install-exec-hook:
+	sed --in-place 's|^#!/usr/bin/python3|#!/usr/bin/python2|' \
+	    $(DESTDIR)$(bindir)/dbufstat
+endif
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
 #
 # Print out statistics for all cached dmu buffers. This information
 # is available through the dbufs kstat and may be post-processed as
@@ -27,6 +27,8 @@
 # Copyright (C) 2013 Lawrence Livermore National Security, LLC.
 # Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
 #
+# This script must remain compatible with Python 2.6+ and Python 3.4+.
+#

 import sys
 import getopt
@@ -106,7 +108,7 @@ cols = {
 hdr = None
 xhdr = None
 sep = "  "               # Default separator is 2 spaces
-cmd = ("Usage: dbufstat.py [-bdhnrtvx] [-i file] [-f fields] [-o file] "
+cmd = ("Usage: dbufstat [-bdhnrtvx] [-i file] [-f fields] [-o file] "
       "[-s string] [-F filter]\n")
 raw = 0

@@ -167,11 +169,11 @@ def usage():
                      "character or string\n")
     sys.stderr.write("\t -F : Filter output by value or regex\n")
     sys.stderr.write("\nExamples:\n")
-    sys.stderr.write("\tdbufstat.py -d -o /tmp/d.log\n")
-    sys.stderr.write("\tdbufstat.py -t -s \",\" -o /tmp/t.log\n")
-    sys.stderr.write("\tdbufstat.py -v\n")
-    sys.stderr.write("\tdbufstat.py -d -f pool,object,objset,dsize,cached\n")
-    sys.stderr.write("\tdbufstat.py -bx -F dbc=1,objset=54,pool=testpool\n")
+    sys.stderr.write("\tdbufstat -d -o /tmp/d.log\n")
+    sys.stderr.write("\tdbufstat -t -s \",\" -o /tmp/t.log\n")
+    sys.stderr.write("\tdbufstat -v\n")
+    sys.stderr.write("\tdbufstat -d -f pool,object,objset,dsize,cached\n")
+    sys.stderr.write("\tdbufstat -bx -F dbc=1,objset=54,pool=testpool\n")
    sys.stderr.write("\n")

     sys.exit(1)
config/always-python.m4 (new file, 102 lines)
@@ -0,0 +1,102 @@
+dnl #
+dnl # ZFS_AC_PYTHON_VERSION(version, [action-if-true], [action-if-false])
+dnl #
+dnl # Verify Python version
+dnl #
+AC_DEFUN([ZFS_AC_PYTHON_VERSION], [
+	ver_check=`$PYTHON -c "import sys; print (sys.version.split()[[0]] $1)"`
+	AS_IF([test "$ver_check" = "True"], [
+		m4_ifvaln([$2], [$2])
+	], [
+		m4_ifvaln([$3], [$3])
+	])
+])
+
+dnl #
+dnl # ZFS_AC_PYTHON_MODULE(module_name, [action-if-true], [action-if-false])
+dnl #
+dnl # Checks for Python module. Freely inspired by AX_PYTHON_MODULE
+dnl # https://www.gnu.org/software/autoconf-archive/ax_python_module.html
+dnl # Required by ZFS_AC_CONFIG_ALWAYS_PYZFS.
+dnl #
+AC_DEFUN([ZFS_AC_PYTHON_MODULE], [
+	PYTHON_NAME=`basename $PYTHON`
+	AC_MSG_CHECKING([for $PYTHON_NAME module: $1])
+	AS_IF([$PYTHON -c "import $1" 2>/dev/null], [
+		AC_MSG_RESULT(yes)
+		m4_ifvaln([$2], [$2])
+	], [
+		AC_MSG_RESULT(no)
+		m4_ifvaln([$3], [$3])
+	])
+])
+
+dnl #
+dnl # The majority of the python scripts are written to be compatible
+dnl # with Python 2.6 and Python 3.4. Therefore, they may be installed
+dnl # and used with either interpreter. This option is intended to
+dnl # to provide a method to specify the default system version, and
+dnl # set the PYTHON environment variable accordingly.
+dnl #
+AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYTHON], [
+	AC_ARG_WITH([python],
+		AC_HELP_STRING([--with-python[=VERSION]],
+		[default system python version @<:@default=check@:>@]),
+		[with_python=$withval],
+		[with_python=check])
+
+	AS_CASE([$with_python],
+		[check],
+		[AS_IF([test -x /usr/bin/python3],
+			[PYTHON="python3"],
+			[AS_IF([test -x /usr/bin/python2],
+				[PYTHON="python2"],
+				[PYTHON=""]
+			)]
+		)],
+		[2*], [PYTHON="python${with_python}"],
+		[*python2*], [PYTHON="${with_python}"],
+		[3*], [PYTHON="python${with_python}"],
+		[*python3*], [PYTHON="${with_python}"],
+		[no], [PYTHON=""],
+		[AC_MSG_ERROR([Unknown --with-python value '$with_python'])]
+	)
+
+	AS_IF([$PYTHON --version >/dev/null 2>&1], [ /bin/true ], [
+		AC_MSG_ERROR([Cannot find $PYTHON in your system path])
+	])
+
+	AM_PATH_PYTHON([2.6], [], [:])
+	AM_CONDITIONAL([USING_PYTHON], [test "$PYTHON" != :])
+	AM_CONDITIONAL([USING_PYTHON_2], [test "${PYTHON_VERSION:0:2}" = "2."])
+	AM_CONDITIONAL([USING_PYTHON_3], [test "${PYTHON_VERSION:0:2}" = "3."])
+
+	dnl #
+	dnl # Minimum supported Python versions for utilities:
+	dnl # Python 2.6.x, or Python 3.4.x
+	dnl #
+	AS_IF([test "${PYTHON_VERSION:0:2}" = "2."], [
+		ZFS_AC_PYTHON_VERSION([>= '2.6'], [ /bin/true ],
+			[AC_MSG_ERROR("Python >= 2.6.x is not available")])
+	])
+
+	AS_IF([test "${PYTHON_VERSION:0:2}" = "3."], [
+		ZFS_AC_PYTHON_VERSION([>= '3.4'], [ /bin/true ],
+			[AC_MSG_ERROR("Python >= 3.4.x is not available")])
+	])
+
+	dnl #
+	dnl # Request that packages be built for a specific Python version.
+	dnl #
+	AS_IF([test $with_python != check], [
+		PYTHON_PKG_VERSION=`echo ${PYTHON} | tr -d 'a-zA-Z.'`
+		DEFINE_PYTHON_PKG_VERSION='--define "__use_python_pkg_version '${PYTHON_PKG_VERSION}'"'
+		DEFINE_PYTHON_VERSION='--define "__use_python '${PYTHON}'"'
+	], [
+		DEFINE_PYTHON_VERSION=''
+		DEFINE_PYTHON_PKG_VERSION=''
+	])
+
+	AC_SUBST(DEFINE_PYTHON_VERSION)
+	AC_SUBST(DEFINE_PYTHON_PKG_VERSION)
+])
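Note on the version probe above: ZFS_AC_PYTHON_VERSION splices the requested comparison (for example >= '3.4') after sys.version.split()[0] and checks whether the interpreter prints the string "True"; the doubled brackets are only m4 quoting for [0]. A rough standalone Python sketch of the same check follows; the helper name and the example comparison are illustrative, not part of the patch, and the comparison is a plain string comparison, exactly as in the macro.

    # Sketch (assumed helper): emulate the configure-time version probe.
    import subprocess
    import sys

    def python_version_ok(python, expression):
        """Return True if `python` satisfies e.g. ">= '3.4'" (illustrative)."""
        code = "import sys; print(sys.version.split()[0] %s)" % expression
        out = subprocess.check_output([python, "-c", code])
        return out.strip() == b"True"

    if __name__ == "__main__":
        # Example: test the interpreter running this script against 3.4.
        print(python_version_ok(sys.executable, ">= '3.4'"))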
@@ -1,80 +1,44 @@
 dnl #
-dnl # ZFS_AC_PYTHON_MODULE(module_name, [action-if-true], [action-if-false])
+dnl # Determines if pyzfs can be built, requires Python 2.7 or latter.
 dnl #
-dnl # Checks for Python module. Freely inspired by AX_PYTHON_MODULE
-dnl # https://www.gnu.org/software/autoconf-archive/ax_python_module.html
-dnl #
-AC_DEFUN([ZFS_AC_PYTHON_MODULE],[
-	PYTHON_NAME=`basename $PYTHON`
-	AC_MSG_CHECKING([for $PYTHON_NAME module: $1])
-	$PYTHON -c "import $1" 2>/dev/null
-	if test $? -eq 0;
-	then
-		AC_MSG_RESULT(yes)
-		m4_ifvaln([$2], [$2])
-	else
-		AC_MSG_RESULT(no)
-		m4_ifvaln([$3], [$3])
-	fi
-])
-
-dnl #
-dnl # ZFS_AC_PYTHON_VERSION(version, [action-if-true], [action-if-false])
-dnl #
-dnl # Verify Python version
-dnl #
-AC_DEFUN([ZFS_AC_PYTHON_VERSION], [
-	AC_MSG_CHECKING([for a version of Python $1])
-	version_check=`$PYTHON -c "import sys; print (sys.version.split()[[0]] $1)"`
-	if test "$version_check" = "True";
-	then
-		AC_MSG_RESULT(yes)
-		m4_ifvaln([$2], [$2])
-	else
-		AC_MSG_RESULT(no)
-		m4_ifvaln([$3], [$3])
-	fi
-
-])
-
 AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [
-	PYTHON_REQUIRED_VERSION="<= '2.7.x'"
-
 	AC_ARG_ENABLE([pyzfs],
 		AC_HELP_STRING([--enable-pyzfs],
 		[install libzfs_core python bindings @<:@default=check@:>@]),
 		[enable_pyzfs=$enableval],
 		[enable_pyzfs=check])

-	AM_PATH_PYTHON([2.7], [], [
+	dnl #
+	dnl # Packages for pyzfs specifically enabled/disabled.
+	dnl #
+	AS_IF([test "x$enable_pyzfs" != xcheck], [
 		AS_IF([test "x$enable_pyzfs" = xyes], [
-			AC_MSG_ERROR("python >= 2.7 is not installed")
-		], [test ! "x$enable_pyzfs" = xno], [
-			enable_pyzfs=no
-		])
-	])
-	AM_CONDITIONAL([HAVE_PYTHON], [test "$PYTHON" != :])
-
-	dnl #
-	dnl # Python 2.7.x is supported, other versions (3.5) are not yet
-	dnl #
-	AS_IF([test "x$enable_pyzfs" = xcheck], [
-		ZFS_AC_PYTHON_VERSION([$PYTHON_REQUIRED_VERSION], [], [
-			AS_IF([test "x$enable_pyzfs" = xyes], [
-				AC_MSG_ERROR("Python $PYTHON_REQUIRED_VERSION is not available")
-			], [test ! "x$enable_pyzfs" = xno], [
-				enable_pyzfs=no
-			])
+			DEFINE_PYZFS='--with pyzfs'
+		], [
+			DEFINE_PYZFS='--without pyzfs'
 		])
+	], [
+		DEFINE_PYZFS=''
 	])
+	AC_SUBST(DEFINE_PYZFS)
+
 	dnl #
 	dnl # Require python-devel libraries
 	dnl #
-	AS_IF([test "x$enable_pyzfs" = xcheck], [
+	AS_IF([test "x$enable_pyzfs" = xcheck -o "x$enable_pyzfs" = xyes], [
+		AS_IF([test "${PYTHON_VERSION:0:2}" = "2."], [
+			PYTHON_REQUIRED_VERSION=">= '2.7.0'"
+		], [
+			AS_IF([test "${PYTHON_VERSION:0:2}" = "3."], [
+				PYTHON_REQUIRED_VERSION=">= '3.4.0'"
+			], [
+				AC_MSG_ERROR("Python $PYTHON_VERSION unknown")
+			])
+		])
+
 		AX_PYTHON_DEVEL([$PYTHON_REQUIRED_VERSION], [
 			AS_IF([test "x$enable_pyzfs" = xyes], [
-				AC_MSG_ERROR("Python development library is not available")
+				AC_MSG_ERROR("Python $PYTHON_REQUIRED_VERSION development library is not installed")
 			], [test ! "x$enable_pyzfs" = xno], [
 				enable_pyzfs=no
 			])
@@ -84,10 +48,10 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [
 	dnl #
 	dnl # Python "setuptools" module is required to build and install pyzfs
 	dnl #
-	AS_IF([test "x$enable_pyzfs" = xcheck], [
+	AS_IF([test "x$enable_pyzfs" = xcheck -o "x$enable_pyzfs" = xyes], [
 		ZFS_AC_PYTHON_MODULE([setuptools], [], [
 			AS_IF([test "x$enable_pyzfs" = xyes], [
-				AC_MSG_ERROR("python-setuptools is not installed")
+				AC_MSG_ERROR("Python $PYTHON_VERSION setuptools is not installed")
 			], [test ! "x$enable_pyzfs" = xno], [
 				enable_pyzfs=no
 			])
@@ -97,10 +61,10 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [
 	dnl #
 	dnl # Python "cffi" module is required to run pyzfs
 	dnl #
-	AS_IF([test "x$enable_pyzfs" = xcheck], [
+	AS_IF([test "x$enable_pyzfs" = xcheck -o "x$enable_pyzfs" = xyes], [
 		ZFS_AC_PYTHON_MODULE([cffi], [], [
 			AS_IF([test "x$enable_pyzfs" = xyes], [
-				AC_MSG_ERROR("python-cffi is not installed")
+				AC_MSG_ERROR("Python $PYTHON_VERSION cffi is not installed")
 			], [test ! "x$enable_pyzfs" = xno], [
 				enable_pyzfs=no
 			])
@@ -114,12 +78,8 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [

 	AM_CONDITIONAL([PYZFS_ENABLED], [test x$enable_pyzfs = xyes])
 	AC_SUBST([PYZFS_ENABLED], [$enable_pyzfs])
-
-	AS_IF([test "x$enable_pyzfs" = xyes], [
-		DEFINE_PYZFS='--define "_pyzfs 1"'
-	],[
-		DEFINE_PYZFS=''
-	])
-	AC_SUBST(DEFINE_PYZFS)
 	AC_SUBST(pythonsitedir, [$PYTHON_SITE_PKG])

+	AC_MSG_CHECKING([whether to enable pyzfs: ])
+	AC_MSG_RESULT($enable_pyzfs)
 ])
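The module probes above (setuptools, cffi) key off the exit status of `$PYTHON -c "import <module>"`. A hedged Python-only equivalent of that probe is sketched below; the helper name is illustrative and the devnull redirection mirrors the macro's 2>/dev/null.

    # Sketch (assumed helper): check whether a module is importable by a
    # given interpreter, the same way ZFS_AC_PYTHON_MODULE does.
    import os
    import subprocess

    def has_module(python, module):
        """True if `python -c "import <module>"` exits with status 0."""
        with open(os.devnull, "wb") as devnull:
            rc = subprocess.call([python, "-c", "import %s" % module],
                                 stdout=devnull, stderr=devnull)
        return rc == 0

    if __name__ == "__main__":
        print(has_module("python3", "cffi"))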
@@ -47,7 +47,7 @@ deb-utils: deb-local rpm-utils
	pkg7=$${name}-test-$${version}.$${arch}.rpm; \
	pkg8=$${name}-dracut-$${version}.$${arch}.rpm; \
	pkg9=$${name}-initramfs-$${version}.$${arch}.rpm; \
-	pkg10=pyzfs-$${version}.noarch.rpm; \
+	pkg10=`ls python*-pyzfs-$${version}* | tail -1`; \
 ## Arguments need to be passed to dh_shlibdeps. Alien provides no mechanism
 ## to do this, so we install a shim onto the path which calls the real
 ## dh_shlibdeps with the required arguments.
@@ -160,6 +160,7 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS], [
	ZFS_AC_CONFIG_ALWAYS_CC_ASAN
	ZFS_AC_CONFIG_ALWAYS_TOOLCHAIN_SIMD
	ZFS_AC_CONFIG_ALWAYS_ARCH
+	ZFS_AC_CONFIG_ALWAYS_PYTHON
	ZFS_AC_CONFIG_ALWAYS_PYZFS
 ])

@@ -264,10 +265,13 @@ AC_DEFUN([ZFS_AC_RPM], [
	RPM_DEFINE_UTIL+=' $(DEFINE_INITRAMFS)'
	RPM_DEFINE_UTIL+=' $(DEFINE_SYSTEMD)'
	RPM_DEFINE_UTIL+=' $(DEFINE_PYZFS)'
+	RPM_DEFINE_UTIL+=' $(DEFINE_PYTHON_VERSION)'
+	RPM_DEFINE_UTIL+=' $(DEFINE_PYTHON_PKG_VERSION)'

-	dnl # Override default lib directory on Debian/Ubuntu systems. The provided
-	dnl # /usr/lib/rpm/platform/<arch>/macros files do not specify the correct
-	dnl # path for multiarch systems as described by the packaging guidelines.
+	dnl # Override default lib directory on Debian/Ubuntu systems. The
+	dnl # provided /usr/lib/rpm/platform/<arch>/macros files do not
+	dnl # specify the correct path for multiarch systems as described
+	dnl # by the packaging guidelines.
	dnl #
	dnl # https://wiki.ubuntu.com/MultiarchSpec
	dnl # https://wiki.debian.org/Multiarch/Implementation
@@ -27,7 +27,7 @@ install-exec-local:
	$(PYTHON) $(srcdir)/setup.py install \
		--prefix $(prefix) \
		--root $(DESTDIR)/ \
-		--install-lib $(pythondir) \
+		--install-lib $(pythonsitedir) \
		--single-version-externally-managed \
		--verbose

@@ -38,6 +38,7 @@ please visit its `GitHub repository <https://github.com/zfsonlinux/zfs>`_.

 Maximum length of any ZFS name.
 '''
+from __future__ import absolute_import, division, print_function

 from ._constants import (
     MAXNAMELEN,
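Every module in the pyzfs bindings gains the same `from __future__ import absolute_import, division, print_function` line, which makes Python 2.7 follow Python 3 semantics for imports, the `/` operator, and print. An illustrative standalone sketch of what each of the three imports changes (not taken from the patch):

    # Illustrative only: effect of the __future__ imports on Python 2.
    from __future__ import absolute_import, division, print_function

    # division: `/` is true division on 2.7 as well (3 / 2 == 1.5, not 1);
    # floor division stays available on both interpreters via `//`.
    assert 3 / 2 == 1.5
    assert 3 // 2 == 1

    # print_function: print is a function, so keyword arguments such as
    # sep= and end= behave identically on 2.7 and 3.x.
    print("hello", "world", sep=", ", end="\n")

    # absolute_import: a bare `import errno` inside a package always refers
    # to the standard library module, never a sibling file named errno.py.
    import errno
    print(errno.ENOENT)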
@@ -18,10 +18,12 @@
 Important `libzfs_core` constants.
 """

+from __future__ import absolute_import, division, print_function
+

 # https://stackoverflow.com/a/1695250
 def enum(*sequential, **named):
-    enums = dict(zip(sequential, range(len(sequential))), **named)
+    enums = dict(((b, a) for a, b in enumerate(sequential)), **named)
     return type('Enum', (), enums)


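The enum() helper's behavior is unchanged by this rewrite: both spellings build the same name-to-index mapping, the new one simply derives it from enumerate() instead of zip()/range(). A minimal usage sketch for reference; the Color example is illustrative, not from the patch.

    # Sketch: how the enum() helper is meant to be used.
    def enum(*sequential, **named):
        # {name: index} for positional names, then merge explicit pairs.
        enums = dict(((b, a) for a, b in enumerate(sequential)), **named)
        return type('Enum', (), enums)

    Color = enum('RED', 'GREEN', 'BLUE', MISSING=255)
    assert Color.RED == 0 and Color.BLUE == 2 and Color.MISSING == 255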
@@ -26,6 +26,7 @@ corresponding interface functions.

 The parameters and exceptions are documented in the `libzfs_core` interfaces.
 """
+from __future__ import absolute_import, division, print_function

 import errno
 import re
@@ -102,8 +103,9 @@ def lzc_snapshot_translate_errors(ret, errlist, snaps, props):

     def _map(ret, name):
         if ret == errno.EXDEV:
-            pool_names = map(_pool_name, snaps)
-            same_pool = all(x == pool_names[0] for x in pool_names)
+            pool_names = iter(map(_pool_name, snaps))
+            pool_name = next(pool_names, None)
+            same_pool = all(x == pool_name for x in pool_names)
             if same_pool:
                 return lzc_exc.DuplicateSnapshots(name)
             else:
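The hunk above works around a behavior change in map(): on Python 2 it returns a list that can be indexed, on Python 3 it returns a one-shot iterator, so pool_names[0] would raise TypeError and a second pass over it would silently see nothing. The patch therefore pulls the first element with next() and compares the remaining ones against it. A hedged sketch of the difference, with a simplified lambda standing in for _pool_name:

    # Illustrative sketch: why indexing the result of map() breaks on Python 3.
    names = [b"pool/fs@a", b"pool/fs@b", b"other/fs@c"]

    pools = map(lambda s: s.split(b"/")[0], names)
    # Python 2: pools is a list, pools[0] works.
    # Python 3: pools is an iterator, pools[0] raises TypeError.

    # Portable pattern used by the patch:
    pools = iter(map(lambda s: s.split(b"/")[0], names))
    first = next(pools, None)
    same_pool = all(p == first for p in pools)
    print(same_pool)  # False: b"other" differs from b"pool"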
@@ -270,7 +272,8 @@ def lzc_hold_translate_errors(ret, errlist, holds, fd):
 def lzc_release_translate_errors(ret, errlist, holds):
     if ret == 0:
         return
-    for _, hold_list in holds.iteritems():
+    for snap in holds:
+        hold_list = holds[snap]
         if not isinstance(hold_list, list):
             raise lzc_exc.TypeError('holds must be in a list')

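dict.iteritems(), iterkeys() and itervalues() were removed in Python 3, which is why these loops now iterate the dictionary itself and index it. A short sketch of the equivalent spellings that run on both interpreters (values are illustrative):

    # Illustrative sketch of 2/3-compatible dictionary iteration.
    holds = {b"pool/fs@snap1": ["hold-a"], b"pool/fs@snap2": ["hold-b"]}

    # Python 2 only (gone in Python 3):
    #     for snap, hold_list in holds.iteritems(): ...

    # Works on both; the form used by the patch:
    for snap in holds:
        hold_list = holds[snap]
        assert isinstance(hold_list, list)

    # Also works on both (items() is a list on 2, a view on 3):
    for snap, hold_list in holds.items():
        assert isinstance(hold_list, list)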
@@ -705,15 +708,17 @@ def _handle_err_list(ret, errlist, names, exception, mapper):

     if len(errlist) == 0:
         suppressed_count = 0
+        names = list(zip(names, range(2)))
         if len(names) == 1:
-            name = names[0]
+            name, _ = names[0]
         else:
             name = None
         errors = [mapper(ret, name)]
     else:
         errors = []
         suppressed_count = errlist.pop('N_MORE_ERRORS', 0)
-        for name, err in errlist.iteritems():
+        for name in errlist:
+            err = errlist[name]
             errors.append(mapper(err, name))

     raise exception(errors, suppressed_count)
@@ -727,7 +732,7 @@ def _pool_name(name):
     '@' separates a snapshot name from the rest of the dataset name.
     '#' separates a bookmark name from the rest of the dataset name.
     '''
-    return re.split('[/@#]', name, 1)[0]
+    return re.split(b'[/@#]', name, 1)[0]


 def _fs_name(name):
@@ -737,26 +742,26 @@ def _fs_name(name):
     '@' separates a snapshot name from the rest of the dataset name.
     '#' separates a bookmark name from the rest of the dataset name.
     '''
-    return re.split('[@#]', name, 1)[0]
+    return re.split(b'[@#]', name, 1)[0]


 def _is_valid_name_component(component):
-    allowed = string.ascii_letters + string.digits + '-_.: '
-    return component and all(x in allowed for x in component)
+    allowed = string.ascii_letters + string.digits + u'-_.: '
+    return component and all(x in allowed.encode() for x in component)


 def _is_valid_fs_name(name):
-    return name and all(_is_valid_name_component(c) for c in name.split('/'))
+    return name and all(_is_valid_name_component(c) for c in name.split(b'/'))


 def _is_valid_snap_name(name):
-    parts = name.split('@')
+    parts = name.split(b'@')
     return (len(parts) == 2 and _is_valid_fs_name(parts[0]) and
             _is_valid_name_component(parts[1]))


 def _is_valid_bmark_name(name):
-    parts = name.split('#')
+    parts = name.split(b'#')
     return (len(parts) == 2 and _is_valid_fs_name(parts[0]) and
             _is_valid_name_component(parts[1]))

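Dataset names travel through these helpers as bytes, so separators and regex patterns must be bytes too: on Python 3 a str pattern cannot be applied to a bytes subject, and iterating a bytes object yields ints rather than 1-character strings, which is why the membership test runs against an encoded allowed-character set. A hedged sketch of both points:

    # Illustrative sketch: ZFS names handled as bytes on Python 2 and 3.
    import re
    import string

    name = b"tank/home@monday"

    # Pattern and subject must both be bytes on Python 3:
    pool = re.split(b'[/@#]', name, 1)[0]      # b"tank"
    # re.split('[/@#]', name, 1) would raise TypeError on Python 3.

    # Iterating bytes gives ints on Python 3 and 1-char strings on Python 2,
    # so the allowed set is encoded once and both element types compare
    # correctly against it.
    allowed = (string.ascii_letters + string.digits + u'-_.: ').encode()
    component = b"home"
    print(all(x in allowed for x in component))  # True on 2.7 and 3.x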
@@ -26,6 +26,7 @@ increased convenience. Output parameters are not used and return values
 are directly returned. Error conditions are signalled by exceptions
 rather than by integer error codes.
 """
+from __future__ import absolute_import, division, print_function

 import errno
 import functools
@@ -112,7 +113,7 @@ def lzc_create(name, ds_type='zfs', props=None, key=None):
     if props is None:
         props = {}
     if key is None:
-        key = bytes("")
+        key = b""
     else:
         key = bytes(key)
     if ds_type == 'zfs':
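bytes("") is only valid on Python 2, where bytes is an alias for str; on Python 3 it raises TypeError because a str argument needs an encoding. The b"" literal is accepted by both Python 2.6+ and Python 3, which is why the default key is spelled that way throughout. Illustrative sketch:

    # Illustrative sketch: constructing an empty key portably.
    try:
        empty = bytes("")       # works on 2.x, TypeError on 3.x
    except TypeError:
        empty = None

    empty = b""                 # identical meaning on 2.6+ and 3.x
    assert isinstance(empty, bytes)

    # bytes(key) remains fine on both when key is already a bytes object.
    key = bytes(b"\x00" * 32)
    assert len(key) == 32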
@@ -485,8 +486,8 @@ def lzc_hold(holds, fd=None):
     errors.lzc_hold_translate_errors(ret, errlist, holds, fd)
     # If there is no error (no exception raised by _handleErrList), but errlist
     # is not empty, then it contains missing snapshots.
-    assert all(x == errno.ENOENT for x in errlist.itervalues())
-    return errlist.keys()
+    assert all(errlist[x] == errno.ENOENT for x in errlist)
+    return list(errlist.keys())


 def lzc_release(holds):
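Besides itervalues() no longer existing, dict.keys() returns a lazy view on Python 3 instead of a list, so callers that expect a real list (indexing, concatenation, mutating while iterating) need an explicit list() as above. A short sketch:

    # Illustrative sketch: keys() is a view on Python 3, a list on Python 2.
    import errno

    errlist = {b"pool/fs@gone": errno.ENOENT, b"pool/fs@lost": errno.ENOENT}

    # Iterating the dict and indexing works the same on both interpreters.
    assert all(errlist[name] == errno.ENOENT for name in errlist)

    missing = list(errlist.keys())   # a real list on both 2.7 and 3.x
    assert missing[0] in errlist     # errlist.keys()[0] would fail on 3.x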
@@ -521,7 +522,8 @@ def lzc_release(holds):
     '''
     errlist = {}
     holds_dict = {}
-    for snap, hold_list in holds.iteritems():
+    for snap in holds:
+        hold_list = holds[snap]
         if not isinstance(hold_list, list):
             raise TypeError('holds must be in a list')
         holds_dict[snap] = {hold: None for hold in hold_list}
@@ -531,8 +533,8 @@ def lzc_release(holds):
     errors.lzc_release_translate_errors(ret, errlist, holds)
     # If there is no error (no exception raised by _handleErrList), but errlist
     # is not empty, then it contains missing snapshots and tags.
-    assert all(x == errno.ENOENT for x in errlist.itervalues())
-    return errlist.keys()
+    assert all(errlist[x] == errno.ENOENT for x in errlist)
+    return list(errlist.keys())


 def lzc_get_holds(snapname):
@@ -846,7 +848,7 @@ def lzc_change_key(fsname, crypt_cmd, props=None, key=None):
     if props is None:
         props = {}
     if key is None:
-        key = bytes("")
+        key = b""
     else:
         key = bytes(key)
     cmd = {
@@ -929,13 +931,13 @@ def lzc_channel_program(
         error.
     '''
     output = {}
-    params_nv = nvlist_in({"argv": params})
+    params_nv = nvlist_in({b"argv": params})
     with nvlist_out(output) as outnvl:
         ret = _lib.lzc_channel_program(
             poolname, program, instrlimit, memlimit, params_nv, outnvl)
     errors.lzc_channel_program_translate_error(
-        ret, poolname, output.get("error"))
-    return output.get("return")
+        ret, poolname, output.get(b"error"))
+    return output.get(b"return")


 def lzc_channel_program_nosync(
@@ -974,13 +976,13 @@ def lzc_channel_program_nosync(
         error.
     '''
     output = {}
-    params_nv = nvlist_in({"argv": params})
+    params_nv = nvlist_in({b"argv": params})
     with nvlist_out(output) as outnvl:
         ret = _lib.lzc_channel_program_nosync(
             poolname, program, instrlimit, memlimit, params_nv, outnvl)
     errors.lzc_channel_program_translate_error(
-        ret, poolname, output.get("error"))
-    return output.get("return")
+        ret, poolname, output.get(b"error"))
+    return output.get(b"return")


 def lzc_receive_resumable(
@@ -1404,7 +1406,7 @@ def lzc_receive_with_cmdprops(
     if cmdprops is None:
         cmdprops = {}
     if key is None:
-        key = bytes("")
+        key = b""
     else:
         key = bytes(key)

@@ -1509,7 +1511,7 @@ def lzc_sync(poolname, force=False):
     `innvl` has been replaced by the `force` boolean and `outnvl` has been
     conveniently removed since it's not used.
     '''
-    innvl = nvlist_in({"force": force})
+    innvl = nvlist_in({b"force": force})
     with nvlist_out({}) as outnvl:
         ret = _lib.lzc_sync(poolname, innvl, outnvl)
     errors.lzc_sync_translate_error(ret, poolname)
@@ -1873,9 +1875,9 @@ def lzc_get_props(name):
         mountpoint_val = '/' + name
     else:
         mountpoint_val = None
-    result = {k: v['value'] for k, v in result.iteritems()}
+    result = {k: result[k]['value'] for k in result}
     if 'clones' in result:
-        result['clones'] = result['clones'].keys()
+        result['clones'] = list(result['clones'].keys())
     if mountpoint_val is not None:
         result['mountpoint'] = mountpoint_val
     return result
@@ -47,6 +47,7 @@ Format:
 - a value can be a list of dictionaries that adhere to this format
 - all elements of a list value must be of the same type
 """
+from __future__ import absolute_import, division, print_function

 import numbers
 from collections import namedtuple
@@ -159,10 +160,10 @@ def _type_info(typeid):

 # only integer properties need to be here
 _prop_name_to_type_str = {
-    "rewind-request": "uint32",
-    "type": "uint32",
-    "N_MORE_ERRORS": "int32",
-    "pool_context": "int32",
+    b"rewind-request": "uint32",
+    b"type": "uint32",
+    b"N_MORE_ERRORS": "int32",
+    b"pool_context": "int32",
 }

@@ -19,6 +19,7 @@ The package that contains a module per each C library that
 `libzfs_core` uses. The modules expose CFFI objects required
 to make calls to functions in the libraries.
 """
+from __future__ import absolute_import, division, print_function

 import threading
 import importlib
@@ -47,7 +48,7 @@ def _setup_cffi():
     ffi = FFI()

     for module_name in MODULES:
-        module = importlib.import_module("." + module_name, __package__)
+        module = importlib.import_module("." + module_name, __name__)
         ffi.cdef(module.CDEF)
         lib = LazyLibrary(ffi, module.LIBRARY)
         setattr(module, "ffi", ffi)
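importlib.import_module("." + name, anchor) performs a relative import anchored at the given package. Inside a package's __init__.py, __name__ is always the package name, whereas __package__ can be unset (None) on some Python 2 interpreters until a relative import has run, so anchoring on __name__ is the safer spelling; that is the presumed reason for the change above. A runnable approximation using a stdlib package as a stand-in:

    # Illustrative sketch: anchored relative imports via importlib.
    # Equivalent to "from . import message" executed inside the "email"
    # package; works on both Python 2.7 and Python 3.
    import importlib

    pkg = "email"                       # stands in for the bindings package
    mod = importlib.import_module(".message", pkg)
    print(mod.__name__)                 # "email.message"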
@@ -17,6 +17,7 @@
 """
 Python bindings for ``libnvpair``.
 """
+from __future__ import absolute_import, division, print_function

 CDEF = """
 typedef ... nvlist_t;
@@ -17,6 +17,7 @@
 """
 Python bindings for ``libzfs_core``.
 """
+from __future__ import absolute_import, division, print_function

 CDEF = """

@@ -17,6 +17,7 @@
 """
 Utility functions for casting to a specific C type.
 """
+from __future__ import absolute_import, division, print_function

 from .bindings.libnvpair import ffi as _ffi

@@ -30,8 +31,8 @@ def _ffi_cast(type_name):
            try:
                type_info.elements[value]
            except KeyError as e:
-                raise OverflowError('Invalid enum <%s> value %s' %
-                                    (type_info.cname, e.message))
+                raise OverflowError('Invalid enum <%s> value %s: %s' %
+                                    (type_info.cname, value, e))
        else:
            _ffi.new(type_name + '*', value)
        return _ffi.cast(type_name, value)
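BaseException.message was deprecated in Python 2.6 and removed in Python 3, so the error text above is now built from the offending value and the exception object itself; str(e) and e.args work on both interpreters. A short illustrative sketch:

    # Illustrative sketch: portable access to exception details.
    mapping = {0: 'B_FALSE', 1: 'B_TRUE'}
    value = 2

    try:
        mapping[value]
    except KeyError as e:
        # e.message existed on Python 2 only; str(e) and e.args work on 2 and 3.
        msg = 'Invalid enum <%s> value %s: %s' % ('boolean_t', value, e)
        print(msg)          # Invalid enum <boolean_t> value 2: 2
        print(e.args[0])    # 2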
@@ -17,6 +17,7 @@
 """
 Exceptions that can be raised by libzfs_core operations.
 """
+from __future__ import absolute_import, division, print_function

 import errno
 from ._constants import (
File diff suppressed because it is too large
@@ -21,6 +21,7 @@ and verify that no information is lost and value types are correct.
 The tests also check that various error conditions like unsupported
 value types or out of bounds values are detected.
 """
+from __future__ import absolute_import, division, print_function

 import unittest

@@ -43,25 +44,25 @@ class TestNVList(unittest.TestCase):
     def _assertIntDictsEqual(self, dict1, dict2):
         self.assertEqual(
             len(dict1), len(dict1),
-            "resulting dictionary is of different size")
+            b"resulting dictionary is of different size")
         for key in dict1.keys():
             self.assertEqual(int(dict1[key]), int(dict2[key]))

     def _assertIntArrayDictsEqual(self, dict1, dict2):
         self.assertEqual(
             len(dict1), len(dict1),
-            "resulting dictionary is of different size")
+            b"resulting dictionary is of different size")
         for key in dict1.keys():
             val1 = dict1[key]
             val2 = dict2[key]
             self.assertEqual(
-                len(val1), len(val2), "array values of different sizes")
+                len(val1), len(val2), b"array values of different sizes")
             for x, y in zip(val1, val2):
                 self.assertEqual(int(x), int(y))

     def test_empty(self):
         res = self._dict_to_nvlist_to_dict({})
-        self.assertEqual(len(res), 0, "expected empty dict")
+        self.assertEqual(len(res), 0, b"expected empty dict")

     def test_invalid_key_type(self):
         with self.assertRaises(TypeError):
@@ -69,564 +70,564 @@ class TestNVList(unittest.TestCase):

     def test_invalid_val_type__tuple(self):
         with self.assertRaises(TypeError):
-            self._dict_to_nvlist_to_dict({"key": (1, 2)})
+            self._dict_to_nvlist_to_dict({b"key": (1, 2)})

     def test_invalid_val_type__set(self):
         with self.assertRaises(TypeError):
-            self._dict_to_nvlist_to_dict({"key": set(1, 2)})
+            self._dict_to_nvlist_to_dict({b"key": set(1, 2)})

     def test_invalid_array_val_type(self):
         with self.assertRaises(TypeError):
-            self._dict_to_nvlist_to_dict({"key": [(1, 2), (3, 4)]})
+            self._dict_to_nvlist_to_dict({b"key": [(1, 2), (3, 4)]})

     def test_invalid_array_of_arrays_val_type(self):
         with self.assertRaises(TypeError):
-            self._dict_to_nvlist_to_dict({"key": [[1, 2], [3, 4]]})
+            self._dict_to_nvlist_to_dict({b"key": [[1, 2], [3, 4]]})

     def test_string_value(self):
-        props = {"key": "value"}
+        props = {b"key": b"value"}
         res = self._dict_to_nvlist_to_dict(props)
         self.assertEqual(props, res)

     def test_implicit_boolean_value(self):
-        props = {"key": None}
+        props = {b"key": None}
         res = self._dict_to_nvlist_to_dict(props)
         self.assertEqual(props, res)

     def test_boolean_values(self):
-        props = {"key1": True, "key2": False}
+        props = {b"key1": True, b"key2": False}
         res = self._dict_to_nvlist_to_dict(props)
         self.assertEqual(props, res)

     def test_explicit_boolean_true_value(self):
-        props = {"key": boolean_t(1)}
+        props = {b"key": boolean_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_boolean_false_value(self):
-        props = {"key": boolean_t(0)}
+        props = {b"key": boolean_t(0)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_boolean_invalid_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": boolean_t(2)}
+            props = {b"key": boolean_t(2)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_boolean_another_invalid_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": boolean_t(-1)}
+            props = {b"key": boolean_t(-1)}
             self._dict_to_nvlist_to_dict(props)

     def test_uint64_value(self):
-        props = {"key": 1}
+        props = {b"key": 1}
         res = self._dict_to_nvlist_to_dict(props)
         self.assertEqual(props, res)

     def test_uint64_max_value(self):
-        props = {"key": 2 ** 64 - 1}
+        props = {b"key": 2 ** 64 - 1}
         res = self._dict_to_nvlist_to_dict(props)
         self.assertEqual(props, res)

     def test_uint64_too_large_value(self):
-        props = {"key": 2 ** 64}
+        props = {b"key": 2 ** 64}
         with self.assertRaises(OverflowError):
             self._dict_to_nvlist_to_dict(props)

     def test_uint64_negative_value(self):
-        props = {"key": -1}
+        props = {b"key": -1}
         with self.assertRaises(OverflowError):
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint64_value(self):
-        props = {"key": uint64_t(1)}
+        props = {b"key": uint64_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint64_max_value(self):
-        props = {"key": uint64_t(2 ** 64 - 1)}
+        props = {b"key": uint64_t(2 ** 64 - 1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint64_too_large_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint64_t(2 ** 64)}
+            props = {b"key": uint64_t(2 ** 64)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint64_negative_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint64_t(-1)}
+            props = {b"key": uint64_t(-1)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint32_value(self):
-        props = {"key": uint32_t(1)}
+        props = {b"key": uint32_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint32_max_value(self):
-        props = {"key": uint32_t(2 ** 32 - 1)}
+        props = {b"key": uint32_t(2 ** 32 - 1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint32_too_large_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint32_t(2 ** 32)}
+            props = {b"key": uint32_t(2 ** 32)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint32_negative_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint32_t(-1)}
+            props = {b"key": uint32_t(-1)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint16_value(self):
-        props = {"key": uint16_t(1)}
+        props = {b"key": uint16_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint16_max_value(self):
-        props = {"key": uint16_t(2 ** 16 - 1)}
+        props = {b"key": uint16_t(2 ** 16 - 1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint16_too_large_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint16_t(2 ** 16)}
+            props = {b"key": uint16_t(2 ** 16)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint16_negative_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint16_t(-1)}
+            props = {b"key": uint16_t(-1)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint8_value(self):
-        props = {"key": uint8_t(1)}
+        props = {b"key": uint8_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint8_max_value(self):
-        props = {"key": uint8_t(2 ** 8 - 1)}
+        props = {b"key": uint8_t(2 ** 8 - 1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_uint8_too_large_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint8_t(2 ** 8)}
+            props = {b"key": uint8_t(2 ** 8)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_uint8_negative_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uint8_t(-1)}
+            props = {b"key": uint8_t(-1)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_byte_value(self):
-        props = {"key": uchar_t(1)}
+        props = {b"key": uchar_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_byte_max_value(self):
-        props = {"key": uchar_t(2 ** 8 - 1)}
+        props = {b"key": uchar_t(2 ** 8 - 1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_byte_too_large_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uchar_t(2 ** 8)}
+            props = {b"key": uchar_t(2 ** 8)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_byte_negative_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": uchar_t(-1)}
+            props = {b"key": uchar_t(-1)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_int64_value(self):
-        props = {"key": int64_t(1)}
+        props = {b"key": int64_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_int64_max_value(self):
-        props = {"key": int64_t(2 ** 63 - 1)}
+        props = {b"key": int64_t(2 ** 63 - 1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_int64_min_value(self):
-        props = {"key": int64_t(-(2 ** 63))}
+        props = {b"key": int64_t(-(2 ** 63))}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_int64_too_large_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": int64_t(2 ** 63)}
+            props = {b"key": int64_t(2 ** 63)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_int64_too_small_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": int64_t(-(2 ** 63) - 1)}
+            props = {b"key": int64_t(-(2 ** 63) - 1)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_int32_value(self):
-        props = {"key": int32_t(1)}
+        props = {b"key": int32_t(1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_int32_max_value(self):
-        props = {"key": int32_t(2 ** 31 - 1)}
+        props = {b"key": int32_t(2 ** 31 - 1)}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_int32_min_value(self):
-        props = {"key": int32_t(-(2 ** 31))}
+        props = {b"key": int32_t(-(2 ** 31))}
         res = self._dict_to_nvlist_to_dict(props)
         self._assertIntDictsEqual(props, res)

     def test_explicit_int32_too_large_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": int32_t(2 ** 31)}
+            props = {b"key": int32_t(2 ** 31)}
             self._dict_to_nvlist_to_dict(props)

     def test_explicit_int32_too_small_value(self):
         with self.assertRaises(OverflowError):
-            props = {"key": int32_t(-(2 ** 31) - 1)}
+            props = {b"key": int32_t(-(2 ** 31) - 1)}
             self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explicit_int16_value(self):
|
def test_explicit_int16_value(self):
|
||||||
props = {"key": int16_t(1)}
|
props = {b"key": int16_t(1)}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explicit_int16_max_value(self):
|
def test_explicit_int16_max_value(self):
|
||||||
props = {"key": int16_t(2 ** 15 - 1)}
|
props = {b"key": int16_t(2 ** 15 - 1)}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explicit_int16_min_value(self):
|
def test_explicit_int16_min_value(self):
|
||||||
props = {"key": int16_t(-(2 ** 15))}
|
props = {b"key": int16_t(-(2 ** 15))}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explicit_int16_too_large_value(self):
|
def test_explicit_int16_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": int16_t(2 ** 15)}
|
props = {b"key": int16_t(2 ** 15)}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explicit_int16_too_small_value(self):
|
def test_explicit_int16_too_small_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": int16_t(-(2 ** 15) - 1)}
|
props = {b"key": int16_t(-(2 ** 15) - 1)}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explicit_int8_value(self):
|
def test_explicit_int8_value(self):
|
||||||
props = {"key": int8_t(1)}
|
props = {b"key": int8_t(1)}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explicit_int8_max_value(self):
|
def test_explicit_int8_max_value(self):
|
||||||
props = {"key": int8_t(2 ** 7 - 1)}
|
props = {b"key": int8_t(2 ** 7 - 1)}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explicit_int8_min_value(self):
|
def test_explicit_int8_min_value(self):
|
||||||
props = {"key": int8_t(-(2 ** 7))}
|
props = {b"key": int8_t(-(2 ** 7))}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explicit_int8_too_large_value(self):
|
def test_explicit_int8_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": int8_t(2 ** 7)}
|
props = {b"key": int8_t(2 ** 7)}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explicit_int8_too_small_value(self):
|
def test_explicit_int8_too_small_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": int8_t(-(2 ** 7) - 1)}
|
props = {b"key": int8_t(-(2 ** 7) - 1)}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_nested_dict(self):
|
def test_nested_dict(self):
|
||||||
props = {"key": {}}
|
props = {b"key": {}}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self.assertEqual(props, res)
|
self.assertEqual(props, res)
|
||||||
|
|
||||||
def test_nested_nested_dict(self):
|
def test_nested_nested_dict(self):
|
||||||
props = {"key": {"key": {}}}
|
props = {b"key": {b"key": {}}}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self.assertEqual(props, res)
|
self.assertEqual(props, res)
|
||||||
|
|
||||||
def test_mismatching_values_array(self):
|
def test_mismatching_values_array(self):
|
||||||
props = {"key": [1, "string"]}
|
props = {b"key": [1, b"string"]}
|
||||||
with self.assertRaises(TypeError):
|
with self.assertRaises(TypeError):
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_mismatching_values_array2(self):
|
def test_mismatching_values_array2(self):
|
||||||
props = {"key": [True, 10]}
|
props = {b"key": [True, 10]}
|
||||||
with self.assertRaises(TypeError):
|
with self.assertRaises(TypeError):
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_mismatching_values_array3(self):
|
def test_mismatching_values_array3(self):
|
||||||
props = {"key": [1, False]}
|
props = {b"key": [1, False]}
|
||||||
with self.assertRaises(TypeError):
|
with self.assertRaises(TypeError):
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_string_array(self):
|
def test_string_array(self):
|
||||||
props = {"key": ["value", "value2"]}
|
props = {b"key": [b"value", b"value2"]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self.assertEqual(props, res)
|
self.assertEqual(props, res)
|
||||||
|
|
||||||
def test_boolean_array(self):
|
def test_boolean_array(self):
|
||||||
props = {"key": [True, False]}
|
props = {b"key": [True, False]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self.assertEqual(props, res)
|
self.assertEqual(props, res)
|
||||||
|
|
||||||
def test_explicit_boolean_array(self):
|
def test_explicit_boolean_array(self):
|
||||||
props = {"key": [boolean_t(False), boolean_t(True)]}
|
props = {b"key": [boolean_t(False), boolean_t(True)]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_uint64_array(self):
|
def test_uint64_array(self):
|
||||||
props = {"key": [0, 1, 2 ** 64 - 1]}
|
props = {b"key": [0, 1, 2 ** 64 - 1]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self.assertEqual(props, res)
|
self.assertEqual(props, res)
|
||||||
|
|
||||||
def test_uint64_array_too_large_value(self):
|
def test_uint64_array_too_large_value(self):
|
||||||
props = {"key": [0, 2 ** 64]}
|
props = {b"key": [0, 2 ** 64]}
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_uint64_array_negative_value(self):
|
def test_uint64_array_negative_value(self):
|
||||||
props = {"key": [0, -1]}
|
props = {b"key": [0, -1]}
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_mixed_explict_int_array(self):
|
def test_mixed_explict_int_array(self):
|
||||||
with self.assertRaises(TypeError):
|
with self.assertRaises(TypeError):
|
||||||
props = {"key": [uint64_t(0), uint32_t(0)]}
|
props = {b"key": [uint64_t(0), uint32_t(0)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint64_array(self):
|
def test_explict_uint64_array(self):
|
||||||
props = {"key": [uint64_t(0), uint64_t(1), uint64_t(2 ** 64 - 1)]}
|
props = {b"key": [uint64_t(0), uint64_t(1), uint64_t(2 ** 64 - 1)]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_uint64_array_too_large_value(self):
|
def test_explict_uint64_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint64_t(0), uint64_t(2 ** 64)]}
|
props = {b"key": [uint64_t(0), uint64_t(2 ** 64)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint64_array_negative_value(self):
|
def test_explict_uint64_array_negative_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint64_t(0), uint64_t(-1)]}
|
props = {b"key": [uint64_t(0), uint64_t(-1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint32_array(self):
|
def test_explict_uint32_array(self):
|
||||||
props = {"key": [uint32_t(0), uint32_t(1), uint32_t(2 ** 32 - 1)]}
|
props = {b"key": [uint32_t(0), uint32_t(1), uint32_t(2 ** 32 - 1)]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_uint32_array_too_large_value(self):
|
def test_explict_uint32_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint32_t(0), uint32_t(2 ** 32)]}
|
props = {b"key": [uint32_t(0), uint32_t(2 ** 32)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint32_array_negative_value(self):
|
def test_explict_uint32_array_negative_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint32_t(0), uint32_t(-1)]}
|
props = {b"key": [uint32_t(0), uint32_t(-1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint16_array(self):
|
def test_explict_uint16_array(self):
|
||||||
props = {"key": [uint16_t(0), uint16_t(1), uint16_t(2 ** 16 - 1)]}
|
props = {b"key": [uint16_t(0), uint16_t(1), uint16_t(2 ** 16 - 1)]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_uint16_array_too_large_value(self):
|
def test_explict_uint16_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint16_t(0), uint16_t(2 ** 16)]}
|
props = {b"key": [uint16_t(0), uint16_t(2 ** 16)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint16_array_negative_value(self):
|
def test_explict_uint16_array_negative_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint16_t(0), uint16_t(-1)]}
|
props = {b"key": [uint16_t(0), uint16_t(-1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint8_array(self):
|
def test_explict_uint8_array(self):
|
||||||
props = {"key": [uint8_t(0), uint8_t(1), uint8_t(2 ** 8 - 1)]}
|
props = {b"key": [uint8_t(0), uint8_t(1), uint8_t(2 ** 8 - 1)]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_uint8_array_too_large_value(self):
|
def test_explict_uint8_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint8_t(0), uint8_t(2 ** 8)]}
|
props = {b"key": [uint8_t(0), uint8_t(2 ** 8)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_uint8_array_negative_value(self):
|
def test_explict_uint8_array_negative_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uint8_t(0), uint8_t(-1)]}
|
props = {b"key": [uint8_t(0), uint8_t(-1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_byte_array(self):
|
def test_explict_byte_array(self):
|
||||||
props = {"key": [uchar_t(0), uchar_t(1), uchar_t(2 ** 8 - 1)]}
|
props = {b"key": [uchar_t(0), uchar_t(1), uchar_t(2 ** 8 - 1)]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_byte_array_too_large_value(self):
|
def test_explict_byte_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uchar_t(0), uchar_t(2 ** 8)]}
|
props = {b"key": [uchar_t(0), uchar_t(2 ** 8)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_byte_array_negative_value(self):
|
def test_explict_byte_array_negative_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [uchar_t(0), uchar_t(-1)]}
|
props = {b"key": [uchar_t(0), uchar_t(-1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int64_array(self):
|
def test_explict_int64_array(self):
|
||||||
props = {"key": [
|
props = {b"key": [
|
||||||
int64_t(0), int64_t(1), int64_t(2 ** 63 - 1), int64_t(-(2 ** 63))]}
|
int64_t(0), int64_t(1), int64_t(2 ** 63 - 1), int64_t(-(2 ** 63))]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_int64_array_too_large_value(self):
|
def test_explict_int64_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int64_t(0), int64_t(2 ** 63)]}
|
props = {b"key": [int64_t(0), int64_t(2 ** 63)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int64_array_too_small_value(self):
|
def test_explict_int64_array_too_small_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int64_t(0), int64_t(-(2 ** 63) - 1)]}
|
props = {b"key": [int64_t(0), int64_t(-(2 ** 63) - 1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int32_array(self):
|
def test_explict_int32_array(self):
|
||||||
props = {"key": [
|
props = {b"key": [
|
||||||
int32_t(0), int32_t(1), int32_t(2 ** 31 - 1), int32_t(-(2 ** 31))]}
|
int32_t(0), int32_t(1), int32_t(2 ** 31 - 1), int32_t(-(2 ** 31))]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_int32_array_too_large_value(self):
|
def test_explict_int32_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int32_t(0), int32_t(2 ** 31)]}
|
props = {b"key": [int32_t(0), int32_t(2 ** 31)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int32_array_too_small_value(self):
|
def test_explict_int32_array_too_small_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int32_t(0), int32_t(-(2 ** 31) - 1)]}
|
props = {b"key": [int32_t(0), int32_t(-(2 ** 31) - 1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int16_array(self):
|
def test_explict_int16_array(self):
|
||||||
props = {"key": [
|
props = {b"key": [
|
||||||
int16_t(0), int16_t(1), int16_t(2 ** 15 - 1), int16_t(-(2 ** 15))]}
|
int16_t(0), int16_t(1), int16_t(2 ** 15 - 1), int16_t(-(2 ** 15))]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_int16_array_too_large_value(self):
|
def test_explict_int16_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int16_t(0), int16_t(2 ** 15)]}
|
props = {b"key": [int16_t(0), int16_t(2 ** 15)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int16_array_too_small_value(self):
|
def test_explict_int16_array_too_small_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int16_t(0), int16_t(-(2 ** 15) - 1)]}
|
props = {b"key": [int16_t(0), int16_t(-(2 ** 15) - 1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int8_array(self):
|
def test_explict_int8_array(self):
|
||||||
props = {"key": [
|
props = {b"key": [
|
||||||
int8_t(0), int8_t(1), int8_t(2 ** 7 - 1), int8_t(-(2 ** 7))]}
|
int8_t(0), int8_t(1), int8_t(2 ** 7 - 1), int8_t(-(2 ** 7))]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntArrayDictsEqual(props, res)
|
self._assertIntArrayDictsEqual(props, res)
|
||||||
|
|
||||||
def test_explict_int8_array_too_large_value(self):
|
def test_explict_int8_array_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int8_t(0), int8_t(2 ** 7)]}
|
props = {b"key": [int8_t(0), int8_t(2 ** 7)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_explict_int8_array_too_small_value(self):
|
def test_explict_int8_array_too_small_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"key": [int8_t(0), int8_t(-(2 ** 7) - 1)]}
|
props = {b"key": [int8_t(0), int8_t(-(2 ** 7) - 1)]}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_dict_array(self):
|
def test_dict_array(self):
|
||||||
props = {"key": [{"key": 1}, {"key": None}, {"key": {}}]}
|
props = {b"key": [{b"key": 1}, {b"key": None}, {b"key": {}}]}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self.assertEqual(props, res)
|
self.assertEqual(props, res)
|
||||||
|
|
||||||
def test_implicit_uint32_value(self):
|
def test_implicit_uint32_value(self):
|
||||||
props = {"rewind-request": 1}
|
props = {b"rewind-request": 1}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_implicit_uint32_max_value(self):
|
def test_implicit_uint32_max_value(self):
|
||||||
props = {"rewind-request": 2 ** 32 - 1}
|
props = {b"rewind-request": 2 ** 32 - 1}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_implicit_uint32_too_large_value(self):
|
def test_implicit_uint32_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"rewind-request": 2 ** 32}
|
props = {b"rewind-request": 2 ** 32}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_implicit_uint32_negative_value(self):
|
def test_implicit_uint32_negative_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"rewind-request": -1}
|
props = {b"rewind-request": -1}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_implicit_int32_value(self):
|
def test_implicit_int32_value(self):
|
||||||
props = {"pool_context": 1}
|
props = {b"pool_context": 1}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_implicit_int32_max_value(self):
|
def test_implicit_int32_max_value(self):
|
||||||
props = {"pool_context": 2 ** 31 - 1}
|
props = {b"pool_context": 2 ** 31 - 1}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_implicit_int32_min_value(self):
|
def test_implicit_int32_min_value(self):
|
||||||
props = {"pool_context": -(2 ** 31)}
|
props = {b"pool_context": -(2 ** 31)}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self._assertIntDictsEqual(props, res)
|
self._assertIntDictsEqual(props, res)
|
||||||
|
|
||||||
def test_implicit_int32_too_large_value(self):
|
def test_implicit_int32_too_large_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"pool_context": 2 ** 31}
|
props = {b"pool_context": 2 ** 31}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_implicit_int32_too_small_value(self):
|
def test_implicit_int32_too_small_value(self):
|
||||||
with self.assertRaises(OverflowError):
|
with self.assertRaises(OverflowError):
|
||||||
props = {"pool_context": -(2 ** 31) - 1}
|
props = {b"pool_context": -(2 ** 31) - 1}
|
||||||
self._dict_to_nvlist_to_dict(props)
|
self._dict_to_nvlist_to_dict(props)
|
||||||
|
|
||||||
def test_complex_dict(self):
|
def test_complex_dict(self):
|
||||||
props = {
|
props = {
|
||||||
"key1": "str",
|
b"key1": b"str",
|
||||||
"key2": 10,
|
b"key2": 10,
|
||||||
"key3": {
|
b"key3": {
|
||||||
"skey1": True,
|
b"skey1": True,
|
||||||
"skey2": None,
|
b"skey2": None,
|
||||||
"skey3": [
|
b"skey3": [
|
||||||
True,
|
True,
|
||||||
False,
|
False,
|
||||||
True
|
True
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"key4": [
|
b"key4": [
|
||||||
"ab",
|
b"ab",
|
||||||
"bc"
|
b"bc"
|
||||||
],
|
],
|
||||||
"key5": [
|
b"key5": [
|
||||||
2 ** 64 - 1,
|
2 ** 64 - 1,
|
||||||
1,
|
1,
|
||||||
2,
|
2,
|
||||||
3
|
3
|
||||||
],
|
],
|
||||||
"key6": [
|
b"key6": [
|
||||||
{
|
{
|
||||||
"skey71": "a",
|
b"skey71": b"a",
|
||||||
"skey72": "b",
|
b"skey72": b"b",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"skey71": "c",
|
b"skey71": b"c",
|
||||||
"skey72": "d",
|
b"skey72": b"d",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"skey71": "e",
|
b"skey71": b"e",
|
||||||
"skey72": "f",
|
b"skey72": b"f",
|
||||||
}
|
}
|
||||||
|
|
||||||
],
|
],
|
||||||
"type": 2 ** 32 - 1,
|
b"type": 2 ** 32 - 1,
|
||||||
"pool_context": -(2 ** 31)
|
b"pool_context": -(2 ** 31)
|
||||||
}
|
}
|
||||||
res = self._dict_to_nvlist_to_dict(props)
|
res = self._dict_to_nvlist_to_dict(props)
|
||||||
self.assertEqual(props, res)
|
self.assertEqual(props, res)
|
||||||
|
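Every nvlist name and string value in these tests moves from str to bytes because, under Python 3, the CFFI layer only accepts bytes for C strings. The sketch below is not part of this commit; encode_props is a hypothetical helper showing how caller code could keep writing text keys and convert them to the bytes form the updated tests expect, on both Python 2.7 and 3.x.

    # Hedged sketch (hypothetical helper, not from the commit): encode text
    # keys and string values to bytes before building an nvlist-style dict.
    def encode_props(props):
        def enc(v):
            if isinstance(v, str):
                return v.encode('utf-8')
            if isinstance(v, dict):
                return encode_props(v)
            if isinstance(v, list):
                return [enc(i) for i in v]
            return v
        return {k.encode('utf-8') if isinstance(k, str) else k: enc(v)
                for k, v in props.items()}

    print(encode_props({"pool_context": -1, "key": ["a", "b"]}))
    # On Python 3 this prints {b'pool_context': -1, b'key': [b'a', b'b']}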
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from __future__ import absolute_import, division, print_function
 
 from setuptools import setup, find_packages
 
@@ -28,8 +29,13 @@ setup(
         "Development Status :: 4 - Beta",
         "Intended Audience :: Developers",
         "License :: OSI Approved :: Apache Software License",
-        "Programming Language :: Python :: 2 :: Only",
+        "Programming Language :: Python :: 2",
         "Programming Language :: Python :: 2.7",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.4",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
         "Topic :: System :: Filesystems",
         "Topic :: Software Development :: Libraries",
     ],
@@ -47,7 +53,7 @@ setup(
     setup_requires=[
        "cffi",
     ],
-    python_requires='>=2.7,<3',
+    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,<4',
     zip_safe=False,
     test_suite="libzfs_core.test",
 )
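The new python_requires specifier admits 2.7 and 3.4 onwards while excluding 3.0-3.3. A quick way to check which interpreter versions satisfy it is sketched below; this assumes the third-party packaging module and is only an illustration, not part of the commit.

    from packaging.specifiers import SpecifierSet

    # Same specifier string as the setup.py hunk above.
    spec = SpecifierSet(">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,<4")
    for ver in ("2.6", "2.7", "3.3", "3.4", "3.7"):
        print(ver, ver in spec)
    # Expected: 2.6 False, 2.7 True, 3.3 False, 3.4 True, 3.7 True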
@@ -53,16 +53,6 @@
 %bcond_with asan
 %bcond_with systemd
 
-# Python permits the !/usr/bin/python shebang for scripts that are cross
-# compatible between python2 and python3, but Fedora 28 does not. Fedora
-# wants us to choose python3 for cross-compatible scripts. Since we want
-# to support python2 and python3 users, exclude our scripts from Fedora 28's
-# RPM build check, so that we don't get a bunch of build warnings.
-#
-# Details: https://github.com/zfsonlinux/zfs/issues/7360
-#
-%global __brp_mangle_shebangs_exclude_from arc_summary.py|arcstat.py|dbufstat.py|test-runner.py|zts-report.py
-
 # Generic enable switch for systemd
 %if %{with systemd}
 %define _systemd 1
@@ -85,6 +75,32 @@
 %define _systemd 1
 %endif
 
+# When not specified default to distribution provided version. This
+# is normally Python 3, but for RHEL <= 7 only Python 2 is provided.
+%if %{undefined __use_python}
+%if 0%{?rhel} && 0%{?rhel} <= 7
+%define __python /usr/bin/python2
+%define __python_pkg_version 2
+%define __python_cffi_pkg python-cffi
+%else
+%define __python /usr/bin/python3
+%define __python_pkg_version 3
+%define __python_cffi_pkg python3-cffi
+%endif
+%else
+%define __python %{__use_python}
+%define __python_pkg_version %{__use_python_pkg_version}
+%define __python_cffi_pkg python%{__python_pkg_version}-cffi
+%endif
+
+# By default python-pyzfs is enabled, with the exception of
+# RHEL 6 which by default uses Python 2.6 which is too old.
+%if 0%{?rhel} == 6
+%bcond_with pyzfs
+%else
+%bcond_without pyzfs
+%endif
+
 Name: @PACKAGE@
 Version: @VERSION@
 Release: @RELEASE@%{?dist}
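With the macros above the interpreter is chosen automatically, but a packager can still force it by defining __use_python (and its companion macros) at build time. The command below is only a sketch of that mechanism; the source RPM name is a placeholder.

    # Sketch: force a Python 2 build on a distribution that defaults to Python 3.
    rpmbuild --rebuild zfs.src.rpm \
        --define "__use_python /usr/bin/python2" \
        --define "__use_python_pkg_version 2"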
@@ -135,7 +151,7 @@ Requires: util-linux
 Requires: sysstat
 
 %description
-This package contains the ZFS command line utilities.
+This package contains the core ZFS command line utilities.
 
 %package -n libzpool2
 Summary: Native ZFS pool library for Linux
@@ -219,6 +235,7 @@ Requires: acl
 Requires: sudo
 Requires: sysstat
 Requires: libaio
+Requires: python%{__python_pkg_version}
 %if 0%{?rhel}%{?fedora}%{?suse_version}
 BuildRequires: libaio-devel
 %endif
@@ -240,23 +257,23 @@ Requires: grep
 This package contains a dracut module used to construct an initramfs
 image which is ZFS aware.
 
-%if 0%{?_pyzfs}
-%package -n pyzfs
-Summary: Python wrapper for libzfs_core
+%if %{with pyzfs}
+%package -n python%{__python_pkg_version}-pyzfs
+Summary: Python %{python_version} wrapper for libzfs_core
 Group: Development/Languages/Python
 License: Apache-2.0
 BuildArch: noarch
 Requires: libzfs2 = %{version}
 Requires: libnvpair1 = %{version}
 Requires: libffi
-Requires: python >= 2.7
-Requires: python-cffi
+Requires: python%{__python_pkg_version}
+Requires: %{__python_cffi_pkg}
 %if 0%{?rhel}%{?fedora}%{?suse_version}
-BuildRequires: python-devel
+BuildRequires: python%{__python_pkg_version}-devel
 BuildRequires: libffi-devel
 %endif
 
-%description -n pyzfs
+%description -n python%{__python_pkg_version}-pyzfs
 This package provides a python wrapper for the libzfs_core C library.
 %endif
 
@@ -299,6 +316,12 @@ image which is ZFS aware.
 %define systemd --enable-sysvinit --disable-systemd
 %endif
 
+%if %{with pyzfs}
+%define pyzfs --enable-pyzfs
+%else
+%define pyzfs --disable-pyzfs
+%endif
+
 %setup -q
 
 %build
@@ -307,11 +330,13 @@ image which is ZFS aware.
     --with-udevdir=%{_udevdir} \
     --with-udevruledir=%{_udevruledir} \
     --with-dracutdir=%{_dracutdir} \
+    --with-python=%{__python} \
     --disable-static \
     %{debug} \
     %{debuginfo} \
     %{asan} \
-    %{systemd}
+    %{systemd}\
+    %{pyzfs}
 make %{?_smp_mflags}
 
 %install
@@ -379,12 +404,20 @@ systemctl --system daemon-reload >/dev/null || true
 %endif
 
 %files
+# Core utilities
 %{_sbindir}/*
-%{_bindir}/*
-%{_libexecdir}/%{name}
+%{_bindir}/raidz_test
+%{_bindir}/zgenhostid
+# Optional Python 2/3 scripts
+%{_bindir}/arc_summary
+%{_bindir}/arcstat
+%{_bindir}/dbufstat
+# Man pages
 %{_mandir}/man1/*
 %{_mandir}/man5/*
 %{_mandir}/man8/*
+# Configuration files and scripts
+%{_libexecdir}/%{name}
 %{_udevdir}/vdev_id
 %{_udevdir}/zvol_id
 %{_udevdir}/rules.d/*
@@ -426,8 +459,8 @@ systemctl --system daemon-reload >/dev/null || true
 %doc contrib/dracut/README.dracut.markdown
 %{_dracutdir}/modules.d/*
 
-%if 0%{?_pyzfs}
-%files -n pyzfs
+%if %{with pyzfs}
+%files -n python%{__python_pkg_version}-pyzfs
 %doc contrib/pyzfs/README
 %doc contrib/pyzfs/LICENSE
 %defattr(-,root,root,-)
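Because pyzfs is now guarded by a bcond rather than the ad hoc _pyzfs macro, the sub-package can be toggled from the rpmbuild command line without editing the spec. A hedged example of the standard bcond switches:

    # pyzfs is built by default (%bcond_without pyzfs); disable it with:
    rpmbuild -ba zfs.spec --without pyzfs
    # or force it on where the default is off (RHEL 6):
    rpmbuild -ba zfs.spec --with pyzfs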
@@ -247,6 +247,10 @@ constrain_path() {
     ln -fs "$STF_PATH/gunzip" "$STF_PATH/uncompress"
     ln -fs "$STF_PATH/exportfs" "$STF_PATH/share"
     ln -fs "$STF_PATH/exportfs" "$STF_PATH/unshare"
+
+    if [ -L "$STF_PATH/arc_summary3" ]; then
+        ln -fs "$STF_PATH/arc_summary3" "$STF_PATH/arc_summary"
+    fi
 }
 
 #
@@ -477,8 +477,7 @@ tests = ['zdb_001_neg', 'zfs_001_neg', 'zfs_allow_001_neg',
     'zpool_offline_001_neg', 'zpool_online_001_neg', 'zpool_remove_001_neg',
     'zpool_replace_001_neg', 'zpool_scrub_001_neg', 'zpool_set_001_neg',
     'zpool_status_001_neg', 'zpool_upgrade_001_neg', 'arcstat_001_pos',
-    'arc_summary_001_pos', 'arc_summary_002_neg',
-    'arc_summary3_001_pos', 'dbufstat_001_pos']
+    'arc_summary_001_pos', 'arc_summary_002_neg', 'dbufstat_001_pos']
 user =
 tags = ['functional', 'cli_user', 'misc']
 
@@ -2,3 +2,14 @@ pkgdatadir = $(datadir)/@PACKAGE@/test-runner/bin
 dist_pkgdata_SCRIPTS = \
         test-runner.py \
         zts-report.py
+#
+# These scripts are compatibile with both Python 2.6 and 3.4. As such the
+# python 3 shebang can be replaced at install time when targeting a python
+# 2 system. This allows us to maintain a single version of the source.
+#
+if USING_PYTHON_2
+install-data-hook:
+        sed --in-place 's|^#!/usr/bin/python3|#!/usr/bin/python2|' \
+                $(DESTDIR)$(pkgdatadir)/test-runner.py \
+                $(DESTDIR)$(pkgdatadir)/zts-report.py
+endif
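The install hook only has to rewrite the first line of each script, so its effect can be previewed with the same sed expression on a throwaway file (demo.py below is just a placeholder):

    $ printf '#!/usr/bin/python3\nprint("hi")\n' > demo.py
    $ sed --in-place 's|^#!/usr/bin/python3|#!/usr/bin/python2|' demo.py
    $ head -1 demo.py
    #!/usr/bin/python2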
@@ -12,9 +12,11 @@
 #
 
 #
-# Copyright (c) 2012, 2015 by Delphix. All rights reserved.
+# Copyright (c) 2012, 2018 by Delphix. All rights reserved.
 # Copyright (c) 2017 Datto Inc.
 #
+# This script must remain compatible with Python 2.6+ and Python 3.4+.
+#
 
 # some python 2.7 system don't have a configparser shim
 try:
@@ -23,7 +25,8 @@ except ImportError:
     import ConfigParser as configparser
 
 import os
-import logging
+import sys
+
 from datetime import datetime
 from optparse import OptionParser
 from pwd import getpwnam
@@ -31,8 +34,6 @@ from pwd import getpwuid
 from select import select
 from subprocess import PIPE
 from subprocess import Popen
-from sys import argv
-from sys import maxsize
 from threading import Timer
 from time import time
 
@@ -41,6 +42,10 @@ TESTDIR = '/usr/share/zfs/'
 KILL = 'kill'
 TRUE = 'true'
 SUDO = 'sudo'
+LOG_FILE = 'LOG_FILE'
+LOG_OUT = 'LOG_OUT'
+LOG_ERR = 'LOG_ERR'
+LOG_FILE_OBJ = None
 
 
 class Result(object):
@@ -84,7 +89,7 @@ class Output(object):
     """
     def __init__(self, stream):
        self.stream = stream
-        self._buf = ''
+        self._buf = b''
         self.lines = []
 
     def fileno(self):
@@ -109,15 +114,15 @@ class Output(object):
         buf = os.read(fd, 4096)
         if not buf:
             return None
-        if '\n' not in buf:
+        if b'\n' not in buf:
             self._buf += buf
             return []
 
         buf = self._buf + buf
-        tmp, rest = buf.rsplit('\n', 1)
+        tmp, rest = buf.rsplit(b'\n', 1)
         self._buf = rest
         now = datetime.now()
-        rows = tmp.split('\n')
+        rows = tmp.split(b'\n')
         self.lines += [(now, r) for r in rows]
 
 
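Keeping the Output buffer in bytes and splitting only on b'\n' means the runner never decodes test output, so lines that are not valid UTF-8 still pass through intact. The standalone sketch below mirrors that buffering idea; the names are illustrative and not from the file.

    buf_pending = b''

    def split_lines(chunk):
        # Accumulate raw bytes and emit only complete lines, as Output.read()
        # does after the switch to a bytes buffer.
        global buf_pending
        data = buf_pending + chunk
        if b'\n' not in data:
            buf_pending = data
            return []
        complete, buf_pending = data.rsplit(b'\n', 1)
        return complete.split(b'\n')

    print(split_lines(b'partial'))        # []
    print(split_lines(b' line\nnext'))    # [b'partial line']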
@@ -225,7 +230,7 @@ class Cmd(object):
         proc = Popen(privcmd, stdout=PIPE, stderr=PIPE)
         # Allow a special timeout value of 0 to mean infinity
         if int(self.timeout) == 0:
-            self.timeout = maxsize
+            self.timeout = sys.maxsize
         t = Timer(int(self.timeout), self.kill_cmd, [proc])
 
         try:
@@ -252,50 +257,52 @@ class Cmd(object):
         self.result.runtime = '%02d:%02d' % (m, s)
         self.result.result = 'SKIP'
 
-    def log(self, logger, options):
+    def log(self, options):
         """
         This function is responsible for writing all output. This includes
         the console output, the logfile of all results (with timestamped
         merged stdout and stderr), and for each test, the unmodified
         stdout/stderr/merged in it's own file.
         """
-        if logger is None:
-            return
 
         logname = getpwuid(os.getuid()).pw_name
         user = ' (run as %s)' % (self.user if len(self.user) else logname)
         msga = 'Test: %s%s ' % (self.pathname, user)
-        msgb = '[%s] [%s]' % (self.result.runtime, self.result.result)
+        msgb = '[%s] [%s]\n' % (self.result.runtime, self.result.result)
         pad = ' ' * (80 - (len(msga) + len(msgb)))
+        result_line = msga + pad + msgb
 
-        # If -q is specified, only print a line for tests that didn't pass.
-        # This means passing tests need to be logged as DEBUG, or the one
-        # line summary will only be printed in the logfile for failures.
+        # The result line is always written to the log file. If -q was
+        # specified only failures are written to the console, otherwise
+        # the result line is written to the console.
+        write_log(bytearray(result_line, encoding='utf-8'), LOG_FILE)
         if not options.quiet:
-            logger.info('%s%s%s' % (msga, pad, msgb))
-        elif self.result.result is not 'PASS':
-            logger.info('%s%s%s' % (msga, pad, msgb))
-        else:
-            logger.debug('%s%s%s' % (msga, pad, msgb))
+            write_log(result_line, LOG_OUT)
+        elif options.quiet and self.result.result is not 'PASS':
+            write_log(result_line, LOG_OUT)
 
         lines = sorted(self.result.stdout + self.result.stderr,
                        key=lambda x: x[0])
 
+        # Write timestamped output (stdout and stderr) to the logfile
         for dt, line in lines:
-            logger.debug('%s %s' % (dt.strftime("%H:%M:%S.%f ")[:11], line))
+            timestamp = bytearray(dt.strftime("%H:%M:%S.%f ")[:11],
+                                  encoding='utf-8')
+            write_log(b'%s %s\n' % (timestamp, line), LOG_FILE)
 
+        # Write the separate stdout/stderr/merged files, if the data exists
         if len(self.result.stdout):
-            with open(os.path.join(self.outputdir, 'stdout'), 'w') as out:
+            with open(os.path.join(self.outputdir, 'stdout'), 'wb') as out:
                 for _, line in self.result.stdout:
-                    os.write(out.fileno(), '%s\n' % line)
+                    os.write(out.fileno(), b'%s\n' % line)
         if len(self.result.stderr):
-            with open(os.path.join(self.outputdir, 'stderr'), 'w') as err:
+            with open(os.path.join(self.outputdir, 'stderr'), 'wb') as err:
                 for _, line in self.result.stderr:
-                    os.write(err.fileno(), '%s\n' % line)
+                    os.write(err.fileno(), b'%s\n' % line)
         if len(self.result.stdout) and len(self.result.stderr):
-            with open(os.path.join(self.outputdir, 'merged'), 'w') as merged:
+            with open(os.path.join(self.outputdir, 'merged'), 'wb') as merged:
                 for _, line in lines:
-                    os.write(merged.fileno(), '%s\n' % line)
+                    os.write(merged.fileno(), b'%s\n' % line)
 
 
 class Test(Cmd):
@@ -323,7 +330,7 @@ class Test(Cmd):
            (self.pathname, self.outputdir, self.timeout, self.pre,
             pre_user, self.post, post_user, self.user, self.tags)
 
-    def verify(self, logger):
+    def verify(self):
         """
         Check the pre/post scripts, user and Test. Omit the Test from this
         run if there are any problems.
@@ -333,19 +340,19 @@ class Test(Cmd):
 
         for f in [f for f in files if len(f)]:
             if not verify_file(f):
-                logger.info("Warning: Test '%s' not added to this run because"
-                            " it failed verification." % f)
+                write_log("Warning: Test '%s' not added to this run because"
+                          " it failed verification.\n" % f, LOG_ERR)
                 return False
 
         for user in [user for user in users if len(user)]:
-            if not verify_user(user, logger):
-                logger.info("Not adding Test '%s' to this run." %
-                            self.pathname)
+            if not verify_user(user):
+                write_log("Not adding Test '%s' to this run.\n" %
+                          self.pathname, LOG_ERR)
                 return False
 
         return True
 
-    def run(self, logger, options):
+    def run(self, options):
         """
         Create Cmd instances for the pre/post scripts. If the pre script
         doesn't pass, skip this Test. Run the post script regardless.
@@ -363,18 +370,18 @@ class Test(Cmd):
         if len(pretest.pathname):
             pretest.run(options)
             cont = pretest.result.result is 'PASS'
-            pretest.log(logger, options)
+            pretest.log(options)
 
         if cont:
             test.run(options)
         else:
             test.skip()
 
-        test.log(logger, options)
+        test.log(options)
 
         if len(posttest.pathname):
             posttest.run(options)
-            posttest.log(logger, options)
+            posttest.log(options)
 
 
 class TestGroup(Test):
@@ -398,7 +405,7 @@ class TestGroup(Test):
            (self.pathname, self.outputdir, self.tests, self.timeout,
             self.pre, pre_user, self.post, post_user, self.user, self.tags)
 
-    def verify(self, logger):
+    def verify(self):
         """
         Check the pre/post scripts, user and tests in this TestGroup. Omit
         the TestGroup entirely, or simply delete the relevant tests in the
@@ -416,34 +423,34 @@ class TestGroup(Test):
 
         for f in [f for f in auxfiles if len(f)]:
             if self.pathname != os.path.dirname(f):
-                logger.info("Warning: TestGroup '%s' not added to this run. "
-                            "Auxiliary script '%s' exists in a different "
-                            "directory." % (self.pathname, f))
+                write_log("Warning: TestGroup '%s' not added to this run. "
+                          "Auxiliary script '%s' exists in a different "
+                          "directory.\n" % (self.pathname, f), LOG_ERR)
                 return False
 
             if not verify_file(f):
-                logger.info("Warning: TestGroup '%s' not added to this run. "
-                            "Auxiliary script '%s' failed verification." %
-                            (self.pathname, f))
+                write_log("Warning: TestGroup '%s' not added to this run. "
+                          "Auxiliary script '%s' failed verification.\n" %
+                          (self.pathname, f), LOG_ERR)
                 return False
 
         for user in [user for user in users if len(user)]:
-            if not verify_user(user, logger):
-                logger.info("Not adding TestGroup '%s' to this run." %
-                            self.pathname)
+            if not verify_user(user):
+                write_log("Not adding TestGroup '%s' to this run.\n" %
+                          self.pathname, LOG_ERR)
                 return False
 
         # If one of the tests is invalid, delete it, log it, and drive on.
         for test in self.tests:
             if not verify_file(os.path.join(self.pathname, test)):
                 del self.tests[self.tests.index(test)]
-                logger.info("Warning: Test '%s' removed from TestGroup '%s' "
-                            "because it failed verification." %
-                            (test, self.pathname))
+                write_log("Warning: Test '%s' removed from TestGroup '%s' "
+                          "because it failed verification.\n" %
+                          (test, self.pathname), LOG_ERR)
 
         return len(self.tests) is not 0
 
-    def run(self, logger, options):
+    def run(self, options):
         """
         Create Cmd instances for the pre/post scripts. If the pre script
         doesn't pass, skip all the tests in this TestGroup. Run the post
@@ -464,7 +471,7 @@ class TestGroup(Test):
         if len(pretest.pathname):
             pretest.run(options)
             cont = pretest.result.result is 'PASS'
-            pretest.log(logger, options)
+            pretest.log(options)
 
         for fname in self.tests:
             test = Cmd(os.path.join(self.pathname, fname),
@@ -475,11 +482,11 @@ class TestGroup(Test):
             else:
                 test.skip()
 
-            test.log(logger, options)
+            test.log(options)
 
         if len(posttest.pathname):
             posttest.run(options)
-            posttest.log(logger, options)
+            posttest.log(options)
 
 
 class TestRun(object):
@@ -491,7 +498,7 @@ class TestRun(object):
         self.starttime = time()
         self.timestamp = datetime.now().strftime('%Y%m%dT%H%M%S')
         self.outputdir = os.path.join(options.outputdir, self.timestamp)
-        self.logger = self.setup_logging(options)
+        self.setup_logging(options)
         self.defaults = [
             ('outputdir', BASEDIR),
             ('quiet', False),
@@ -524,7 +531,7 @@ class TestRun(object):
         for prop in Test.props:
             setattr(test, prop, getattr(options, prop))
 
-        if test.verify(self.logger):
+        if test.verify():
             self.tests[pathname] = test
 
     def addtestgroup(self, dirname, filenames, options):
@@ -546,9 +553,9 @@ class TestRun(object):
         self.testgroups[dirname] = testgroup
         self.testgroups[dirname].tests = sorted(filenames)
 
-        testgroup.verify(self.logger)
+        testgroup.verify()
 
-    def read(self, logger, options):
+    def read(self, options):
         """
         Read in the specified runfile, and apply the TestRun properties
         listed in the 'DEFAULT' section to our TestRun. Then read each
@@ -589,7 +596,7 @@ class TestRun(object):
                 # Repopulate tests using eval to convert the string to a list
                 testgroup.tests = eval(config.get(section, 'tests'))
 
                if testgroup.verify():
-                if testgroup.verify(logger):
+                if testgroup.verify():
                     self.testgroups[section] = testgroup
             else:
                 test = Test(section)
@@ -598,7 +605,7 @@ class TestRun(object):
                     if config.has_option(sect, prop):
                         setattr(test, prop, config.get(sect, prop))
 
-                if test.verify(logger):
+                if test.verify():
                     self.tests[section] = test
 
     def write(self, options):
@@ -661,42 +668,23 @@ class TestRun(object):
 
     def setup_logging(self, options):
         """
-        Two loggers are set up here. The first is for the logfile which
-        will contain one line summarizing the test, including the test
-        name, result, and running time. This logger will also capture the
-        timestamped combined stdout and stderr of each run. The second
-        logger is optional console output, which will contain only the one
-        line summary. The loggers are initialized at two different levels
-        to facilitate segregating the output.
+        This funtion creates the output directory and gets a file object
+        for the logfile. This function must be called before write_log()
+        can be used.
        """
         if options.dryrun is True:
             return
 
-        testlogger = logging.getLogger(__name__)
-        testlogger.setLevel(logging.DEBUG)
-
+        global LOG_FILE_OBJ
         if options.cmd is not 'wrconfig':
             try:
                 old = os.umask(0)
                 os.makedirs(self.outputdir, mode=0o777)
                 os.umask(old)
+                filename = os.path.join(self.outputdir, 'log')
+                LOG_FILE_OBJ = open(filename, buffering=0, mode='wb')
             except OSError as e:
                 fail('%s' % e)
-            filename = os.path.join(self.outputdir, 'log')
-
-        logfile = logging.FileHandler(filename)
-        logfile.setLevel(logging.DEBUG)
-        logfilefmt = logging.Formatter('%(message)s')
-        logfile.setFormatter(logfilefmt)
-        testlogger.addHandler(logfile)
-
-        cons = logging.StreamHandler()
-        cons.setLevel(logging.INFO)
-        consfmt = logging.Formatter('%(message)s')
-        cons.setFormatter(consfmt)
-        testlogger.addHandler(cons)
-
-        return testlogger
 
     def run(self, options):
         """
@@ -713,14 +701,14 @@ class TestRun(object):
         if not os.path.exists(logsymlink):
             os.symlink(self.outputdir, logsymlink)
         else:
-            print('Could not make a symlink to directory %s' % (
-                self.outputdir))
+            write_log('Could not make a symlink to directory %s\n' %
+                      self.outputdir, LOG_ERR)
         iteration = 0
         while iteration < options.iterations:
             for test in sorted(self.tests.keys()):
-                self.tests[test].run(self.logger, options)
+                self.tests[test].run(options)
             for testgroup in sorted(self.testgroups.keys()):
-                self.testgroups[testgroup].run(self.logger, options)
+                self.testgroups[testgroup].run(options)
             iteration += 1
 
     def summary(self):
@@ -748,6 +736,23 @@ class TestRun(object):
         return 0
 
 
+def write_log(msg, target):
+    """
+    Write the provided message to standard out, standard error or
+    the logfile. If specifying LOG_FILE, then `msg` must be a bytes
+    like object. This way we can still handle output from tests that
+    may be in unexpected encodings.
+    """
+    if target == LOG_OUT:
+        os.write(sys.stdout.fileno(), bytearray(msg, encoding='utf-8'))
+    elif target == LOG_ERR:
+        os.write(sys.stderr.fileno(), bytearray(msg, encoding='utf-8'))
+    elif target == LOG_FILE:
+        os.write(LOG_FILE_OBJ.fileno(), msg)
+    else:
+        fail('log_msg called with unknown target "%s"' % target)
+
+
 def verify_file(pathname):
     """
     Verify that the supplied pathname is an executable regular file.
@@ -763,7 +768,7 @@ def verify_file(pathname):
     return False
 
 
-def verify_user(user, logger):
+def verify_user(user):
     """
     Verify that the specified user exists on this system, and can execute
     sudo without being prompted for a password.
@@ -776,13 +781,15 @@ def verify_user(user, logger):
     try:
         getpwnam(user)
     except KeyError:
-        logger.info("Warning: user '%s' does not exist.", user)
+        write_log("Warning: user '%s' does not exist.\n" % user,
+                  LOG_ERR)
         return False
 
     p = Popen(testcmd)
     p.wait()
     if p.returncode is not 0:
-        logger.info("Warning: user '%s' cannot use passwordless sudo.", user)
+        write_log("Warning: user '%s' cannot use passwordless sudo.\n" % user,
+                  LOG_ERR)
         return False
     else:
         Cmd.verified_users.append(user)
@@ -810,7 +817,7 @@ def find_tests(testrun, options):
 
 
 def fail(retstr, ret=1):
-    print('%s: %s' % (argv[0], retstr))
+    print('%s: %s' % (sys.argv[0], retstr))
     exit(ret)
 
 
@@ -900,7 +907,7 @@ def main():
     if options.cmd is 'runtests':
         find_tests(testrun, options)
     elif options.cmd is 'rdconfig':
-        testrun.read(testrun.logger, options)
+        testrun.read(options)
     elif options.cmd is 'wrconfig':
         find_tests(testrun, options)
         testrun.write(options)
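Note on the test-runner.py hunks above: the commit drops the logging-module plumbing in favor of a module-level file object (LOG_FILE_OBJ) plus the write_log() dispatch helper, so console output stays text while the logfile receives raw bytes. The following is only a minimal standalone sketch of that pattern, not the commit's code: the LOG_OUT/LOG_ERR/LOG_FILE constants are defined in an earlier hunk that is not part of this excerpt, so the values, the setup_log_file() name, and the /tmp path used here are illustrative assumptions.

import os
import sys

# Illustrative constants; the commit defines its own LOG_* values elsewhere
# in test-runner.py.
LOG_OUT = 'log_out'
LOG_ERR = 'log_err'
LOG_FILE = 'log_file'
LOG_FILE_OBJ = None


def setup_log_file(outputdir):
    # Mirror setup_logging(): create the output directory and open an
    # unbuffered binary log file so raw test output can be written as-is.
    global LOG_FILE_OBJ
    if not os.path.isdir(outputdir):
        os.makedirs(outputdir, mode=0o777)
    LOG_FILE_OBJ = open(os.path.join(outputdir, 'log'), buffering=0, mode='wb')


def write_log(msg, target):
    # Text goes to stdout/stderr encoded as UTF-8; the logfile takes bytes,
    # which keeps output from tests in unexpected encodings intact.
    if target == LOG_OUT:
        os.write(sys.stdout.fileno(), bytearray(msg, encoding='utf-8'))
    elif target == LOG_ERR:
        os.write(sys.stderr.fileno(), bytearray(msg, encoding='utf-8'))
    elif target == LOG_FILE:
        os.write(LOG_FILE_OBJ.fileno(), msg)


if __name__ == '__main__':
    setup_log_file('/tmp/test-runner-demo')
    write_log('one-line summary to the console\n', LOG_OUT)
    write_log(b'raw, possibly non-UTF-8 test output\n', LOG_FILE)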
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
 
 #
 # This file and its contents are supplied under the terms of the
@@ -15,6 +15,8 @@
 # Copyright (c) 2017 by Delphix. All rights reserved.
 # Copyright (c) 2018 by Lawrence Livermore National Security, LLC.
 #
+# This script must remain compatible with Python 2.6+ and Python 3.4+.
+#
 
 import os
 import re
@@ -146,10 +146,10 @@ export ZFS_FILES='zdb
        zpool
        ztest
        raidz_test
-       arc_summary.py
-       arc_summary3.py
-       arcstat.py
-       dbufstat.py
+       arc_summary
+       arc_summary3
+       arcstat
+       dbufstat
        zed
        zgenhostid
        zstreamdump'
@@ -37,7 +37,7 @@
 # 2. Store output from dbufs kstat
 # 3. Store output from dbufstats kstat
 # 4. Compare stats presented in dbufstats with stat generated using
-#    dbufstat.py and the dbufs kstat output
+#    dbufstat and the dbufs kstat output
 #
 
 DBUFSTATS_FILE=$(mktemp $TEST_BASE_DIR/dbufstats.out.XXXXXX)
@@ -56,7 +56,7 @@ function testdbufstat # stat_name dbufstat_filter
        [[ -n "$2" ]] && filter="-F $2"
 
        from_dbufstat=$(grep -w "$name" "$DBUFSTATS_FILE" | awk '{ print $3 }')
-       from_dbufs=$(dbufstat.py -bxn -i "$DBUFS_FILE" "$filter" | wc -l)
+       from_dbufs=$(dbufstat -bxn -i "$DBUFS_FILE" "$filter" | wc -l)
 
        within_tolerance $from_dbufstat $from_dbufs 9 \
            || log_fail "Stat $name exceeded tolerance"
@@ -62,18 +62,18 @@ objid=$(stat --format="%i" "$TESTDIR/file")
 log_note "Object ID for $TESTDIR/file is $objid"
 
 log_must eval "cat /proc/spl/kstat/zfs/dbufs > $DBUFS_FILE"
-dbuf=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
-mru=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
-mfu=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
+dbuf=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
+mru=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
+mfu=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
 log_note "dbuf count is $dbuf, mru count is $mru, mfu count is $mfu"
 verify_ne "0" "$mru" "mru count"
 verify_eq "0" "$mfu" "mfu count"
 
 log_must eval "cat $TESTDIR/file > /dev/null"
 log_must eval "cat /proc/spl/kstat/zfs/dbufs > $DBUFS_FILE"
-dbuf=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
-mru=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
-mfu=$(dbufstat.py -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
+dbuf=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid" | wc -l)
+mru=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=1" | wc -l)
+mfu=$(dbufstat -bxn -i "$DBUFS_FILE" -F "object=$objid,list=3" | wc -l)
 log_note "dbuf count is $dbuf, mru count is $mru, mfu count is $mfu"
 verify_ne "0" "$mfu" "mfu count"
 
@@ -46,7 +46,6 @@ dist_pkgdata_SCRIPTS = \
        arcstat_001_pos.ksh \
        arc_summary_001_pos.ksh \
        arc_summary_002_neg.ksh \
-       arc_summary3_001_pos.ksh \
        dbufstat_001_pos.ksh
 
 dist_pkgdata_DATA = \
@@ -1,56 +0,0 @@
-#! /bin/ksh -p
-#
-# CDDL HEADER START
-#
-# The contents of this file are subject to the terms of the
-# Common Development and Distribution License (the "License").
-# You may not use this file except in compliance with the License.
-#
-# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
-# or http://www.opensolaris.org/os/licensing.
-# See the License for the specific language governing permissions
-# and limitations under the License.
-#
-# When distributing Covered Code, include this CDDL HEADER in each
-# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
-# If applicable, add the following below this CDDL HEADER, with the
-# fields enclosed by brackets "[]" replaced with your own identifying
-# information: Portions Copyright [yyyy] [name of copyright owner]
-#
-# CDDL HEADER END
-#
-
-#
-# Copyright (c) 2015 by Lawrence Livermore National Security, LLC.
-# All rights reserved.
-#
-
-. $STF_SUITE/include/libtest.shlib
-
-# Keep the following test until Python 3 is installed on all test systems,
-# then remove
-python3 -V 2>&1 > /dev/null
-if (( $? )); then
-       log_unsupported "Python3 is not installed"
-fi
-
-
-# Some systems have Python 3 installed, but only older versions that don't
-# have the subprocess.run() functionality. We catch these with a separate
-# test. Remove this when all systems have reached 3.5 or greater
-VERSIONPYTEST=$(python3 -V)
-if [[ ${VERSIONPYTEST:9:1} -lt 5 ]]; then
-       log_unsupported "Python3 must be version 3.5 or greater"
-fi
-
-
-set -A args "" "-a" "-d" "-p 1" "-g" "-s arc" "-r"
-log_assert "arc_summary3.py generates output and doesn't return an error code"
-
-typeset -i i=0
-while [[ $i -lt ${#args[*]} ]]; do
-       log_must eval "arc_summary3.py ${args[i]} > /dev/null"
-       ((i = i + 1))
-done
-
-log_pass "arc_summary3.py generates output and doesn't return an error code"
@@ -27,17 +27,34 @@
 
 . $STF_SUITE/include/libtest.shlib
 
-set -A args "" "-a" "-d" "-p 1"
+log_assert "arc_summary generates output and doesn't return an error code"
 
-log_assert "arc_summary.py generates output and doesn't return an error code"
+# Depending on which version of arc_summary is installed some command
+# line options may not be available. The python3 version includes
+# several additional flags.
+python3 -V 2>&1 > /dev/null
+if (( $? )); then
+       # Some systems have Python 3 installed, but only older versions
+       # that don't have the subprocess.run() functionality. We catch
+       # these with a separate test. Remove this when all systems have
+       # reached 3.5 or greater
+       VERSIONPYTEST=$(python3 -V)
+       if [[ ${VERSIONPYTEST:9:1} -lt 5 ]]; then
+               set -A args "" "-a" "-d" "-p 1"
+       else
+               set -A args "" "-a" "-d" "-p 1" "-g" "-s arc" "-r"
+       fi
+else
+       set -A args "" "-a" "-d" "-p 1"
+fi
 
 typeset -i i=0
 while [[ $i -lt ${#args[*]} ]]; do
-       log_must eval "arc_summary.py ${args[i]} > /dev/null"
+       log_must eval "arc_summary ${args[i]} > /dev/null"
        ((i = i + 1))
 done
 
-log_must eval "arc_summary.py | head > /dev/null"
-log_must eval "arc_summary.py | head -1 > /dev/null"
+log_must eval "arc_summary | head > /dev/null"
+log_must eval "arc_summary | head -1 > /dev/null"
 
-log_pass "arc_summary.py generates output and doesn't return an error code"
+log_pass "arc_summary generates output and doesn't return an error code"
@@ -27,12 +27,12 @@
 
 . $STF_SUITE/include/libtest.shlib
 
-typeset args=("-x" "-r" "-5" "-p 7" "--err" "-@")
+typeset args=("-x" "-5" "-p 7" "--err" "-@")
 
-log_assert "arc_summary.py generates an error code with invalid options"
+log_assert "arc_summary generates an error code with invalid options"
 
 for arg in "${args[@]}"; do
-       log_mustnot eval "arc_summary.py $arg > /dev/null"
+       log_mustnot eval "arc_summary $arg > /dev/null"
 done
 
-log_pass "arc_summary.py generates an error code with invalid options"
+log_pass "arc_summary generates an error code with invalid options"
@@ -30,12 +30,12 @@
 set -A args "" "-s \",\"" "-x" "-v" \
     "-f time,hit%,dh%,ph%,mh%"
 
-log_assert "arcstat.py generates output and doesn't return an error code"
+log_assert "arcstat generates output and doesn't return an error code"
 
 typeset -i i=0
 while [[ $i -lt ${#args[*]} ]]; do
-       log_must eval "arcstat.py ${args[i]} > /dev/null"
+       log_must eval "arcstat ${args[i]} > /dev/null"
        ((i = i + 1))
 done
-log_pass "arcstat.py generates output and doesn't return an error code"
+log_pass "arcstat generates output and doesn't return an error code"
 
@@ -32,19 +32,20 @@
 . $STF_SUITE/include/libtest.shlib
 . $STF_SUITE/tests/functional/cli_user/misc/misc.cfg
 
-if poolexists $TESTPOOL.virt
-then
+if poolexists $TESTPOOL.virt; then
        log_must zpool destroy $TESTPOOL.virt
 fi
 
-if poolexists v1-pool
-then
+if poolexists v1-pool; then
        log_must zpool destroy v1-pool
 fi
 
-if [[ -f $TEST_BASE_DIR/zfstest_datastream.dat ]]
-then
-       log_must rm -f $TEST_BASE_DIR/zfstest_datastream.dat
-fi
+log_must rm -f $TEST_BASE_DIR/zfstest_datastream.dat
+log_must rm -f $TEST_BASE_DIR/disk1.dat $TEST_BASE_DIR/disk2.dat \
+       $TEST_BASE_DIR/disk3.dat $TEST_BASE_DIR/disk-additional.dat \
+       $TEST_BASE_DIR/disk-export.dat $TEST_BASE_DIR/disk-offline.dat \
+       $TEST_BASE_DIR/disk-spare1.dat $TEST_BASE_DIR/disk-spare2.dat
+log_must rm -f $TEST_BASE_DIR/zfs-pool-v1.dat \
+       $TEST_BASE_DIR/zfs-pool-v1.dat.bz2
 
 default_cleanup
@@ -29,15 +29,15 @@
 
 set -A args "" "-b" "-d" "-r" "-v" "-s \",\"" "-x" "-n"
 
-log_assert "dbufstat.py generates output and doesn't return an error code"
+log_assert "dbufstat generates output and doesn't return an error code"
 
 typeset -i i=0
 while [[ $i -lt ${#args[*]} ]]; do
-       log_must eval "dbufstat.py ${args[i]} > /dev/null"
+       log_must eval "dbufstat ${args[i]} > /dev/null"
        ((i = i + 1))
 done
 
-# A simple test of dbufstat.py filter functionality
-log_must eval "dbufstat.py -F object=10,dbc=1,pool=$TESTPOOL > /dev/null"
+# A simple test of dbufstat filter functionality
+log_must eval "dbufstat -F object=10,dbc=1,pool=$TESTPOOL > /dev/null"
 
-log_pass "dbufstat.py generates output and doesn't return an error code"
+log_pass "dbufstat generates output and doesn't return an error code"
tests/zfs-tests/tests/functional/pyzfs/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
+pyzfs_unittest.ksh
@@ -1,4 +1,18 @@
-pkgdatadir = $(datadir)/@PACKAGE@/zfs-tests/tests/functional/pyzfs
-dist_pkgdata_SCRIPTS = \
+pkgpyzfsdir = $(datadir)/@PACKAGE@/zfs-tests/tests/functional/pyzfs
+pkgpyzfs_SCRIPTS = \
        pyzfs_unittest.ksh
 
+EXTRA_DIST = \
+       pyzfs_unittest.ksh.in
+
+#
+# The pyzfs module is built either for Python 2 or Python 3. In order
+# to properly test it the unit tests must be updated to the matching vesion.
+#
+$(pkgpyzfs_SCRIPTS):%:%.in
+       -$(SED) -e 's,@PYTHON\@,$(PYTHON),g' \
+               $< >'$@'
+       -chmod 775 $@
+
+distclean-local::
+       -$(RM) $(pkgpyzfs_SCRIPTS)
@@ -28,7 +28,7 @@
 verify_runnable "global"
 
 # Verify that the required dependencies for testing are installed.
-python -c "import cffi" 2>/dev/null
+@PYTHON@ -c "import cffi" 2>/dev/null
 if [ $? -eq 1 ]; then
        log_unsupported "python-cffi not found by Python"
 fi
@@ -37,7 +37,7 @@ fi
 # only if pyzfs was not installed due to missing, build-time, dependencies; if
 # we cannot load "libzfs_core" due to other reasons, for instance an API/ABI
 # mismatch, we want to report it.
-python -c '
+@PYTHON@ -c '
 import pkgutil, sys
 sys.exit(pkgutil.find_loader("libzfs_core") is None)'
 if [ $? -eq 1 ]; then
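Note on the @PYTHON@ check above: pkgutil.find_loader() only locates the module without importing it, so a pyzfs that is simply not installed can be skipped, while a pyzfs that is installed but fails to import (for example an API/ABI mismatch) is still reported by the unittest run below. A rough standalone Python sketch of the same idea follows; it is illustrative only, and apart from the libzfs_core module name nothing in it is taken from the commit.

import pkgutil
import sys

# find_loader() does not import the module, so a module that is installed
# but broken will not be mistaken for a missing one.
if pkgutil.find_loader("libzfs_core") is None:
    print("libzfs_core is not installed; skipping the pyzfs unit tests")
    sys.exit(0)

# If the module is present, any import error (such as a library mismatch)
# propagates and gets reported instead of being silently skipped.
import libzfs_core
print("pyzfs import OK")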
@@ -47,7 +47,7 @@ fi
 log_assert "Verify the nvlist and libzfs_core Python unittest run successfully"
 
 # NOTE: don't use log_must() here because it makes output unreadable
-python -m unittest --verbose \
+@PYTHON@ -m unittest --verbose \
        libzfs_core.test.test_nvlist.TestNVList \
        libzfs_core.test.test_libzfs_core.ZFSTest
 if [ $? -ne 0 ]; then