#!/usr/bin/env @PYTHON_SHEBANG@
#
# Print out ZFS ARC Statistics exported via kstat(1)
# For a definition of fields, or usage, use arcstat -v
#
# This script was originally a fork of the original arcstat.pl (0.1)
# by Neelakanth Nadgir, originally published on his Sun blog on
# 09/18/2007
#     http://blogs.sun.com/realneel/entry/zfs_arc_statistics
#
# A new version aimed to improve upon the original by adding features
# and fixing bugs as needed. This version was maintained by Mike
# Harsch and was hosted in a public open source repository:
#     http://github.com/mharsch/arcstat
#
# but has since moved to the illumos-gate repository.
#
# This Python port was written by John Hixson for FreeNAS, introduced
# in commit e2c29f:
#     https://github.com/freenas/freenas
#
# and has been improved by many people since.
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License, Version 1.0 only
# (the "License"). You may not use this file except in compliance
# with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or https://opensource.org/licenses/CDDL-1.0.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
# Fields have a fixed width. Every interval, we fill the "v"
# hash with its corresponding value (v[field]=value) using calculate().
# @hdr is the array of fields that needs to be printed, so we
# just iterate over this array and print the values using our pretty printer.
#
# This script must remain compatible with Python 3.6+.
#

import sys
import time
import getopt
import re
import copy

from signal import signal, SIGINT, SIGWINCH, SIG_DFL


cols = {
    # HDR: [Size, Scale, Description]
    "time": [8, -1, "Time"],
    "hits": [4, 1000, "ARC hits per second"],
    "iohs": [4, 1000, "ARC I/O hits per second"],
    "miss": [4, 1000, "ARC misses per second"],
    "read": [4, 1000, "Total ARC accesses per second"],
    "hit%": [4, 100, "ARC hit percentage"],
    "ioh%": [4, 100, "ARC I/O hit percentage"],
    "miss%": [5, 100, "ARC miss percentage"],
    "dhit": [4, 1000, "Demand hits per second"],
    "dioh": [4, 1000, "Demand I/O hits per second"],
    "dmis": [4, 1000, "Demand misses per second"],
    "dh%": [3, 100, "Demand hit percentage"],
    "di%": [3, 100, "Demand I/O hit percentage"],
    "dm%": [3, 100, "Demand miss percentage"],
    "ddhit": [5, 1000, "Demand data hits per second"],
    "ddioh": [5, 1000, "Demand data I/O hits per second"],
    "ddmis": [5, 1000, "Demand data misses per second"],
    "ddh%": [4, 100, "Demand data hit percentage"],
    "ddi%": [4, 100, "Demand data I/O hit percentage"],
    "ddm%": [4, 100, "Demand data miss percentage"],
    "dmhit": [5, 1000, "Demand metadata hits per second"],
    "dmioh": [5, 1000, "Demand metadata I/O hits per second"],
    "dmmis": [5, 1000, "Demand metadata misses per second"],
    "dmh%": [4, 100, "Demand metadata hit percentage"],
    "dmi%": [4, 100, "Demand metadata I/O hit percentage"],
    "dmm%": [4, 100, "Demand metadata miss percentage"],
    "phit": [4, 1000, "Prefetch hits per second"],
    "pioh": [4, 1000, "Prefetch I/O hits per second"],
    "pmis": [4, 1000, "Prefetch misses per second"],
    "ph%": [3, 100, "Prefetch hits percentage"],
    "pi%": [3, 100, "Prefetch I/O hits percentage"],
    "pm%": [3, 100, "Prefetch miss percentage"],
    "pdhit": [5, 1000, "Prefetch data hits per second"],
    "pdioh": [5, 1000, "Prefetch data I/O hits per second"],
    "pdmis": [5, 1000, "Prefetch data misses per second"],
    "pdh%": [4, 100, "Prefetch data hits percentage"],
    "pdi%": [4, 100, "Prefetch data I/O hits percentage"],
    "pdm%": [4, 100, "Prefetch data miss percentage"],
    "pmhit": [5, 1000, "Prefetch metadata hits per second"],
    "pmioh": [5, 1000, "Prefetch metadata I/O hits per second"],
    "pmmis": [5, 1000, "Prefetch metadata misses per second"],
    "pmh%": [4, 100, "Prefetch metadata hits percentage"],
    "pmi%": [4, 100, "Prefetch metadata I/O hits percentage"],
    "pmm%": [4, 100, "Prefetch metadata miss percentage"],
    "mhit": [4, 1000, "Metadata hits per second"],
    "mioh": [4, 1000, "Metadata I/O hits per second"],
    "mmis": [4, 1000, "Metadata misses per second"],
    "mread": [5, 1000, "Metadata accesses per second"],
    "mh%": [3, 100, "Metadata hit percentage"],
    "mi%": [3, 100, "Metadata I/O hit percentage"],
    "mm%": [3, 100, "Metadata miss percentage"],
    "arcsz": [5, 1024, "ARC size"],
    "size": [5, 1024, "ARC size"],
    "c": [5, 1024, "ARC target size"],
    "mfu": [4, 1000, "MFU list hits per second"],
    "mru": [4, 1000, "MRU list hits per second"],
    "mfug": [4, 1000, "MFU ghost list hits per second"],
    "mrug": [4, 1000, "MRU ghost list hits per second"],
    "unc": [4, 1000, "Uncached list hits per second"],
    "eskip": [5, 1000, "evict_skip per second"],
    "el2skip": [7, 1000, "evict skip, due to l2 writes, per second"],
    "el2cach": [7, 1024, "Size of L2 cached evictions per second"],
    "el2el": [5, 1024, "Size of L2 eligible evictions per second"],
    "el2mfu": [6, 1024, "Size of L2 eligible MFU evictions per second"],
    "el2mru": [6, 1024, "Size of L2 eligible MRU evictions per second"],
    "el2inel": [7, 1024, "Size of L2 ineligible evictions per second"],
    "mtxmis": [6, 1000, "mutex_miss per second"],
    "dread": [5, 1000, "Demand accesses per second"],
    "ddread": [6, 1000, "Demand data accesses per second"],
    "dmread": [6, 1000, "Demand metadata accesses per second"],
    "pread": [5, 1000, "Prefetch accesses per second"],
    "pdread": [6, 1000, "Prefetch data accesses per second"],
    "pmread": [6, 1000, "Prefetch metadata accesses per second"],
    "l2hits": [6, 1000, "L2ARC hits per second"],
    "l2miss": [6, 1000, "L2ARC misses per second"],
    "l2read": [6, 1000, "Total L2ARC accesses per second"],
    "l2hit%": [6, 100, "L2ARC access hit percentage"],
    "l2miss%": [7, 100, "L2ARC access miss percentage"],
    "l2pref": [6, 1024, "L2ARC prefetch allocated size"],
    "l2mfu": [5, 1024, "L2ARC MFU allocated size"],
    "l2mru": [5, 1024, "L2ARC MRU allocated size"],
    "l2data": [6, 1024, "L2ARC data allocated size"],
    "l2meta": [6, 1024, "L2ARC metadata allocated size"],
    "l2pref%": [7, 100, "L2ARC prefetch percentage"],
    "l2mfu%": [6, 100, "L2ARC MFU percentage"],
    "l2mru%": [6, 100, "L2ARC MRU percentage"],
    "l2data%": [7, 100, "L2ARC data percentage"],
    "l2meta%": [7, 100, "L2ARC metadata percentage"],
    "l2asize": [7, 1024, "Actual (compressed) size of the L2ARC"],
    "l2size": [6, 1024, "Size of the L2ARC"],
    "l2bytes": [7, 1024, "Bytes read per second from the L2ARC"],
    "l2wbytes": [8, 1024, "Bytes written per second to the L2ARC"],
    "grow": [4, 1000, "ARC grow disabled"],
    "need": [5, 1024, "ARC reclaim need"],
    "free": [5, 1024, "ARC free memory"],
    "avail": [5, 1024, "ARC available memory"],
    "waste": [5, 1024, "Wasted memory due to round up to pagesize"],
    "ztotal": [6, 1000, "zfetch total prefetcher calls per second"],
    "zhits": [5, 1000, "zfetch stream hits per second"],
    "zahead": [6, 1000, "zfetch hits ahead of streams per second"],
    "zpast": [5, 1000, "zfetch hits behind streams per second"],
    "zmisses": [7, 1000, "zfetch stream misses per second"],
    "zmax": [4, 1000, "zfetch limit reached per second"],
    "zfuture": [7, 1000, "zfetch stream future per second"],
    "zstride": [7, 1000, "zfetch stream strides per second"],
    "zissued": [7, 1000, "zfetch prefetches issued per second"],
    "zactive": [7, 1000, "zfetch prefetches active per second"],
}

# ARC structural breakdown from arc_summary
structfields = {
    "cmp": ["compressed", "Compressed"],
    "ovh": ["overhead", "Overhead"],
    "bon": ["bonus", "Bonus"],
    "dno": ["dnode", "Dnode"],
    "dbu": ["dbuf", "Dbuf"],
    "hdr": ["hdr", "Header"],
    "l2h": ["l2_hdr", "L2 header"],
    "abd": ["abd_chunk_waste", "ABD chunk waste"],
}
structstats = {                 # size stats
    "percent": "size",          # percentage of this value
    "sz": ["_size", "size"],
}

# ARC types breakdown from arc_summary
typefields = {
    "data": ["data", "ARC data"],
    "meta": ["metadata", "ARC metadata"],
}
typestats = {                   # size stats
    "percent": "cachessz",      # percentage of this value
    "tg": ["_target", "target"],
    "sz": ["_size", "size"],
}

# ARC states breakdown from arc_summary
statefields = {
    "ano": ["anon", "Anonymous"],
    "mfu": ["mfu", "MFU"],
    "mru": ["mru", "MRU"],
    "unc": ["uncached", "Uncached"],
}
targetstats = {
    "percent": "cachessz",      # percentage of this value
    "fields": ["mfu", "mru"],   # only applicable to these fields
    "tg": ["_target", "target"],
    "dt": ["_data_target", "data target"],
    "mt": ["_metadata_target", "metadata target"],
}
statestats = {                  # size stats
    "percent": "cachessz",      # percentage of this value
    "sz": ["_size", "size"],
    "da": ["_data", "data size"],
    "me": ["_metadata", "metadata size"],
    "ed": ["_evictable_data", "evictable data size"],
    "em": ["_evictable_metadata", "evictable metadata size"],
}
ghoststats = {
    "fields": ["mfu", "mru"],   # only applicable to these fields
    "gsz": ["_ghost_size", "ghost size"],
    "gd": ["_ghost_data", "ghost data size"],
    "gm": ["_ghost_metadata", "ghost metadata size"],
}

# fields and stats
fieldstats = [
    [structfields, structstats],
    [typefields, typestats],
    [statefields, targetstats, statestats, ghoststats],
]
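# For each field/stat pair the loop below synthesizes a breakdown column:
# e.g. statefields "mfu" combined with statestats "sz" becomes the "mfusz"
# column, and groups that define a "percent" base also get a matching "%"
# column such as "mfusz%".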
for fs in fieldstats:
    fields, stats = fs[0], fs[1:]
    for field, fieldval in fields.items():
        for group in stats:
            for stat, statval in group.items():
                if stat in ["fields", "percent"] or \
                   ("fields" in group and field not in group["fields"]):
                    continue
                colname = field + stat
                coldesc = fieldval[1] + " " + statval[1]
                cols[colname] = [len(colname), 1024, coldesc]
                if "percent" in group:
                    cols[colname + "%"] = [len(colname) + 1, 100, \
                        coldesc + " percentage"]

v = {}
hdr = ["time", "read", "ddread", "ddh%", "dmread", "dmh%", "pread", "ph%",
       "size", "c", "avail"]
xhdr = ["time", "mfu", "mru", "mfug", "mrug", "unc", "eskip", "mtxmis",
        "dread", "pread", "read"]
zhdr = ["time", "ztotal", "zhits", "zahead", "zpast", "zmisses", "zmax",
        "zfuture", "zstride", "zissued", "zactive"]
sint = 1               # Default interval is 1 second
count = 1              # Default count is 1
hdr_intr = 20          # Print header every 20 lines of output
opfile = None
sep = "  "             # Default separator is 2 spaces
l2exist = False
cmd = ("Usage: arcstat [-havxp] [-f fields] [-o file] [-s string] [interval "
       "[count]]\n")
cur = {}
d = {}
out = None
kstat = None
pretty_print = True


if sys.platform.startswith('freebsd'):
    # Requires py-sysctl on FreeBSD
    import sysctl

    def kstat_update():
        global kstat

        k = [ctl for ctl in sysctl.filter('kstat.zfs.misc.arcstats')
             if ctl.type != sysctl.CTLTYPE_NODE]
        k += [ctl for ctl in sysctl.filter('kstat.zfs.misc.zfetchstats')
              if ctl.type != sysctl.CTLTYPE_NODE]

        if not k:
            sys.exit(1)

        kstat = {}

        for s in k:
            if not s:
                continue

            name, value = s.name, s.value

            if "arcstats" in name:
                # Trims 'kstat.zfs.misc.arcstats' from the name
                kstat[name[24:]] = int(value)
            else:
                kstat["zfetch_" + name[27:]] = int(value)

elif sys.platform.startswith('linux'):
    def kstat_update():
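        # Both proc files contain "name type value" triples; the first two
        # lines of each file are kstat header lines and are stripped below.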
        global kstat

        k1 = [line.strip() for line in open('/proc/spl/kstat/zfs/arcstats')]

        k2 = ["zfetch_" + line.strip() for line in
              open('/proc/spl/kstat/zfs/zfetchstats')]

        if k1 is None or k2 is None:
            sys.exit(1)

        del k1[0:2]
        del k2[0:2]
        k = k1 + k2
        kstat = {}

        for s in k:
            if not s:
                continue

            name, unused, value = s.split()
            kstat[name] = int(value)


def detailed_usage():
    sys.stderr.write("%s\n" % cmd)
    sys.stderr.write("Field definitions are as follows:\n")
    for key in cols:
        sys.stderr.write("%11s : %s\n" % (key, cols[key][2]))
    sys.stderr.write("\n")

    sys.exit(0)


def usage():
    sys.stderr.write("%s\n" % cmd)
    sys.stderr.write("\t -h : Print this help message\n")
    sys.stderr.write("\t -a : Print all possible stats\n")
    sys.stderr.write("\t -v : List all possible field headers and definitions"
                     "\n")
    sys.stderr.write("\t -x : Print extended stats\n")
    sys.stderr.write("\t -z : Print zfetch stats\n")
    sys.stderr.write("\t -f : Specify specific fields to print (see -v)\n")
    sys.stderr.write("\t -o : Redirect output to the specified file\n")
    sys.stderr.write("\t -s : Override default field separator with custom "
                     "character or string\n")
    sys.stderr.write("\t -p : Disable auto-scaling of numerical fields\n")
    sys.stderr.write("\nExamples:\n")
    sys.stderr.write("\tarcstat -o /tmp/a.log 2 10\n")
    sys.stderr.write("\tarcstat -s \",\" -o /tmp/a.log 2 10\n")
    sys.stderr.write("\tarcstat -v\n")
    sys.stderr.write("\tarcstat -f time,hit%,dh%,ph%,mh% 1\n")
    sys.stderr.write("\n")

    sys.exit(1)


def snap_stats():
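    # Take a fresh kstat snapshot and record the per-interval delta of every
    # counter in the global dictionary d.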
    global cur
    global kstat

    prev = copy.deepcopy(cur)
    kstat_update()

    cur = kstat

    # fill in additional values from arc_summary
    cur["caches_size"] = caches_size = cur["anon_data"]+cur["anon_metadata"]+\
        cur["mfu_data"]+cur["mfu_metadata"]+cur["mru_data"]+cur["mru_metadata"]+\
        cur["uncached_data"]+cur["uncached_metadata"]
    s = 4294967296
    pd = cur["pd"]
    pm = cur["pm"]
    meta = cur["meta"]
    v = (s-int(pd))*(s-int(meta))/s
    cur["mfu_data_target"] = v / 65536 * caches_size / 65536
    v = (s-int(pm))*int(meta)/s
    cur["mfu_metadata_target"] = v / 65536 * caches_size / 65536
    v = int(pd)*(s-int(meta))/s
    cur["mru_data_target"] = v / 65536 * caches_size / 65536
    v = int(pm)*int(meta)/s
    cur["mru_metadata_target"] = v / 65536 * caches_size / 65536

    cur["data_target"] = cur["mfu_data_target"] + cur["mru_data_target"]
    cur["metadata_target"] = cur["mfu_metadata_target"] + cur["mru_metadata_target"]
    cur["mfu_target"] = cur["mfu_data_target"] + cur["mfu_metadata_target"]
    cur["mru_target"] = cur["mru_data_target"] + cur["mru_metadata_target"]

    for key in cur:
        if re.match(key, "class"):
            continue
        if key in prev:
            d[key] = cur[key] - prev[key]
        else:
            d[key] = cur[key]


def isint(num):
    if isinstance(num, float):
        return num.is_integer()
    if isinstance(num, int):
        return True
    return False


def prettynum(sz, scale, num=0):
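    # Render num right-aligned in sz characters, scaling it down by powers of
    # `scale` and appending a unit suffix; e.g. with the default 1-second
    # interval, prettynum(5, 1024, 2048) returns "   2K".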
    suffix = ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']
    index = 0

    # Special case for date field
    if scale == -1:
        return "%s" % num

    if scale != 100:
        while abs(num) > scale and index < 5:
            num = num / scale
            index += 1

    width = sz - (0 if index == 0 else 1)
    intlen = len("%.0f" % num)  # %.0f rounds to nearest int
    if sint == 1 and isint(num) or width < intlen + 2:
        decimal = 0
    else:
        decimal = 1
    return "%*.*f%s" % (width, decimal, num, suffix[index])


def print_values():
    global hdr
    global sep
    global v
    global pretty_print

    if pretty_print:
        fmt = lambda col: prettynum(cols[col][0], cols[col][1], v[col])
    else:
        fmt = lambda col: str(v[col])

    sys.stdout.write(sep.join(fmt(col) for col in hdr))
    sys.stdout.write("\n")
    sys.stdout.flush()


def print_header():
    global hdr
    global sep
    global pretty_print

    if pretty_print:
        fmt = lambda col: "%*s" % (cols[col][0], col)
    else:
        fmt = lambda col: col

    sys.stdout.write(sep.join(fmt(col) for col in hdr))
    sys.stdout.write("\n")


def get_terminal_lines():
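    # Return the row count of the controlling terminal, or None when stdout
    # is not a terminal (e.g. when output is redirected to a file).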
    try:
        import fcntl
        import termios
        import struct
        data = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, '1234')
        sz = struct.unpack('hh', data)
        return sz[0]
    except Exception:
        pass


def update_hdr_intr():
    global hdr_intr

    lines = get_terminal_lines()
    if lines and lines > 3:
        hdr_intr = lines - 3


def resize_handler(signum, frame):
    update_hdr_intr()


def init():
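    # Parse command-line options, pick the set of columns to display and,
    # if requested, redirect output to a file.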
    global sint
    global count
    global hdr
    global xhdr
    global zhdr
    global opfile
    global sep
    global out
    global l2exist
    global pretty_print

    desired_cols = None
    aflag = False
    xflag = False
    hflag = False
    vflag = False
    zflag = False
    i = 1

    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            "axzo:hvs:f:p",
            [
                "all",
                "extended",
                "zfetch",
                "outfile",
                "help",
                "verbose",
                "separator",
                "columns",
                "parsable"
            ]
        )
    except getopt.error as msg:
        sys.stderr.write("Error: %s\n" % str(msg))
        usage()
        opts = None

    for opt, arg in opts:
        if opt in ('-a', '--all'):
            aflag = True
        if opt in ('-x', '--extended'):
            xflag = True
        if opt in ('-o', '--outfile'):
            opfile = arg
            i += 1
        if opt in ('-h', '--help'):
            hflag = True
        if opt in ('-v', '--verbose'):
            vflag = True
        if opt in ('-s', '--separator'):
            sep = arg
            i += 1
        if opt in ('-f', '--columns'):
            desired_cols = arg
            i += 1
        if opt in ('-p', '--parsable'):
            pretty_print = False
        if opt in ('-z', '--zfetch'):
            zflag = True
        i += 1

    argv = sys.argv[i:]
    sint = int(argv[0]) if argv else sint
    count = int(argv[1]) if len(argv) > 1 else (0 if len(argv) > 0 else 1)
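    # An interval with no explicit count leaves count at 0, which makes
    # main() loop until interrupted.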
    if hflag or (xflag and zflag) or ((zflag or xflag) and desired_cols):
        usage()

    if vflag:
        detailed_usage()

    if xflag:
        hdr = xhdr

    if zflag:
        hdr = zhdr

    update_hdr_intr()

    # check if L2ARC exists
    snap_stats()
    l2_size = cur.get("l2_size")
    if l2_size:
        l2exist = True

    if desired_cols:
        hdr = desired_cols.split(",")

        invalid = []
        incompat = []
        for ele in hdr:
            if ele not in cols:
                invalid.append(ele)
            elif not l2exist and ele.startswith("l2"):
                sys.stdout.write("No L2ARC Here\n%s\n" % ele)
                incompat.append(ele)

        if len(invalid) > 0:
            sys.stderr.write("Invalid column definition! -- %s\n" % invalid)
            usage()

        if len(incompat) > 0:
            sys.stderr.write("Incompatible field specified! -- %s\n" %
                             incompat)
            usage()

    if aflag:
        if l2exist:
            hdr = cols.keys()
        else:
            hdr = [col for col in cols.keys() if not col.startswith("l2")]

    if opfile:
        try:
            out = open(opfile, "w")
            sys.stdout = out

        except IOError:
            sys.stderr.write("Cannot open %s for writing\n" % opfile)
            sys.exit(1)


def calculate():
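    # Convert the kstat deltas in d into per-second rates and derived
    # percentages for the columns defined in cols.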
    global d
    global v
    global l2exist

    v = dict()
    v["time"] = time.strftime("%H:%M:%S", time.localtime())
    v["hits"] = d["hits"] / sint
    v["iohs"] = d["iohits"] / sint
    v["miss"] = d["misses"] / sint
    v["read"] = v["hits"] + v["iohs"] + v["miss"]
    v["hit%"] = 100 * v["hits"] / v["read"] if v["read"] > 0 else 0
    v["ioh%"] = 100 * v["iohs"] / v["read"] if v["read"] > 0 else 0
    v["miss%"] = 100 - v["hit%"] - v["ioh%"] if v["read"] > 0 else 0

    v["dhit"] = (d["demand_data_hits"] + d["demand_metadata_hits"]) / sint
    v["dioh"] = (d["demand_data_iohits"] + d["demand_metadata_iohits"]) / sint
    v["dmis"] = (d["demand_data_misses"] + d["demand_metadata_misses"]) / sint

    v["dread"] = v["dhit"] + v["dioh"] + v["dmis"]
    v["dh%"] = 100 * v["dhit"] / v["dread"] if v["dread"] > 0 else 0
    v["di%"] = 100 * v["dioh"] / v["dread"] if v["dread"] > 0 else 0
    v["dm%"] = 100 - v["dh%"] - v["di%"] if v["dread"] > 0 else 0

    v["ddhit"] = d["demand_data_hits"] / sint
    v["ddioh"] = d["demand_data_iohits"] / sint
    v["ddmis"] = d["demand_data_misses"] / sint

    v["ddread"] = v["ddhit"] + v["ddioh"] + v["ddmis"]
    v["ddh%"] = 100 * v["ddhit"] / v["ddread"] if v["ddread"] > 0 else 0
    v["ddi%"] = 100 * v["ddioh"] / v["ddread"] if v["ddread"] > 0 else 0
    v["ddm%"] = 100 - v["ddh%"] - v["ddi%"] if v["ddread"] > 0 else 0

    v["dmhit"] = d["demand_metadata_hits"] / sint
    v["dmioh"] = d["demand_metadata_iohits"] / sint
    v["dmmis"] = d["demand_metadata_misses"] / sint

    v["dmread"] = v["dmhit"] + v["dmioh"] + v["dmmis"]
    v["dmh%"] = 100 * v["dmhit"] / v["dmread"] if v["dmread"] > 0 else 0
    v["dmi%"] = 100 * v["dmioh"] / v["dmread"] if v["dmread"] > 0 else 0
    v["dmm%"] = 100 - v["dmh%"] - v["dmi%"] if v["dmread"] > 0 else 0

    v["phit"] = (d["prefetch_data_hits"] + d["prefetch_metadata_hits"]) / sint
    v["pioh"] = (d["prefetch_data_iohits"] +
                 d["prefetch_metadata_iohits"]) / sint
    v["pmis"] = (d["prefetch_data_misses"] +
                 d["prefetch_metadata_misses"]) / sint

    v["pread"] = v["phit"] + v["pioh"] + v["pmis"]
    v["ph%"] = 100 * v["phit"] / v["pread"] if v["pread"] > 0 else 0
    v["pi%"] = 100 * v["pioh"] / v["pread"] if v["pread"] > 0 else 0
    v["pm%"] = 100 - v["ph%"] - v["pi%"] if v["pread"] > 0 else 0

    v["pdhit"] = d["prefetch_data_hits"] / sint
    v["pdioh"] = d["prefetch_data_iohits"] / sint
    v["pdmis"] = d["prefetch_data_misses"] / sint

    v["pdread"] = v["pdhit"] + v["pdioh"] + v["pdmis"]
    v["pdh%"] = 100 * v["pdhit"] / v["pdread"] if v["pdread"] > 0 else 0
    v["pdi%"] = 100 * v["pdioh"] / v["pdread"] if v["pdread"] > 0 else 0
    v["pdm%"] = 100 - v["pdh%"] - v["pdi%"] if v["pdread"] > 0 else 0

    v["pmhit"] = d["prefetch_metadata_hits"] / sint
    v["pmioh"] = d["prefetch_metadata_iohits"] / sint
    v["pmmis"] = d["prefetch_metadata_misses"] / sint

    v["pmread"] = v["pmhit"] + v["pmioh"] + v["pmmis"]
    v["pmh%"] = 100 * v["pmhit"] / v["pmread"] if v["pmread"] > 0 else 0
    v["pmi%"] = 100 * v["pmioh"] / v["pmread"] if v["pmread"] > 0 else 0
    v["pmm%"] = 100 - v["pmh%"] - v["pmi%"] if v["pmread"] > 0 else 0

    v["mhit"] = (d["prefetch_metadata_hits"] +
                 d["demand_metadata_hits"]) / sint
    v["mioh"] = (d["prefetch_metadata_iohits"] +
                 d["demand_metadata_iohits"]) / sint
    v["mmis"] = (d["prefetch_metadata_misses"] +
                 d["demand_metadata_misses"]) / sint

    v["mread"] = v["mhit"] + v["mioh"] + v["mmis"]
    v["mh%"] = 100 * v["mhit"] / v["mread"] if v["mread"] > 0 else 0
    v["mi%"] = 100 * v["mioh"] / v["mread"] if v["mread"] > 0 else 0
    v["mm%"] = 100 - v["mh%"] - v["mi%"] if v["mread"] > 0 else 0

    v["arcsz"] = cur["size"]
    v["size"] = cur["size"]
    v["c"] = cur["c"]
    v["mfu"] = d["mfu_hits"] / sint
    v["mru"] = d["mru_hits"] / sint
    v["mrug"] = d["mru_ghost_hits"] / sint
    v["mfug"] = d["mfu_ghost_hits"] / sint
    v["unc"] = d["uncached_hits"] / sint
    v["eskip"] = d["evict_skip"] / sint
    v["el2skip"] = d["evict_l2_skip"] / sint
    v["el2cach"] = d["evict_l2_cached"] / sint
    v["el2el"] = d["evict_l2_eligible"] / sint
    v["el2mfu"] = d["evict_l2_eligible_mfu"] / sint
    v["el2mru"] = d["evict_l2_eligible_mru"] / sint
    v["el2inel"] = d["evict_l2_ineligible"] / sint
    v["mtxmis"] = d["mutex_miss"] / sint
    v["ztotal"] = (d["zfetch_hits"] + d["zfetch_future"] + d["zfetch_stride"] +
                   d["zfetch_past"] + d["zfetch_misses"]) / sint
    v["zhits"] = d["zfetch_hits"] / sint
    v["zahead"] = (d["zfetch_future"] + d["zfetch_stride"]) / sint
    v["zpast"] = d["zfetch_past"] / sint
    v["zmisses"] = d["zfetch_misses"] / sint
    v["zmax"] = d["zfetch_max_streams"] / sint
    v["zfuture"] = d["zfetch_future"] / sint
    v["zstride"] = d["zfetch_stride"] / sint
    v["zissued"] = d["zfetch_io_issued"] / sint
    v["zactive"] = d["zfetch_io_active"] / sint

    # ARC structural breakdown, ARC types breakdown, ARC states breakdown
    v["cachessz"] = cur["caches_size"]
    for fs in fieldstats:
        fields, stats = fs[0], fs[1:]
        for field, fieldval in fields.items():
            for group in stats:
                for stat, statval in group.items():
                    if stat in ["fields", "percent"] or \
                       ("fields" in group and field not in group["fields"]):
                        continue
                    colname = field + stat
                    v[colname] = cur[fieldval[0] + statval[0]]
                    if "percent" in group:
                        v[colname + "%"] = 100 * v[colname] / \
                            v[group["percent"]] if v[group["percent"]] > 0 else 0

    if l2exist:
        v["l2hits"] = d["l2_hits"] / sint
        v["l2miss"] = d["l2_misses"] / sint
        v["l2read"] = v["l2hits"] + v["l2miss"]
        v["l2hit%"] = 100 * v["l2hits"] / v["l2read"] if v["l2read"] > 0 else 0

        v["l2miss%"] = 100 - v["l2hit%"] if v["l2read"] > 0 else 0
        v["l2asize"] = cur["l2_asize"]
        v["l2size"] = cur["l2_size"]
        v["l2bytes"] = d["l2_read_bytes"] / sint
        v["l2wbytes"] = d["l2_write_bytes"] / sint

        v["l2pref"] = cur["l2_prefetch_asize"]
        v["l2mfu"] = cur["l2_mfu_asize"]
        v["l2mru"] = cur["l2_mru_asize"]
        v["l2data"] = cur["l2_bufc_data_asize"]
        v["l2meta"] = cur["l2_bufc_metadata_asize"]
        v["l2pref%"] = 100 * v["l2pref"] / v["l2asize"]
        v["l2mfu%"] = 100 * v["l2mfu"] / v["l2asize"]
        v["l2mru%"] = 100 * v["l2mru"] / v["l2asize"]
        v["l2data%"] = 100 * v["l2data"] / v["l2asize"]
        v["l2meta%"] = 100 * v["l2meta"] / v["l2asize"]

    v["grow"] = 0 if cur["arc_no_grow"] else 1
    v["need"] = cur["arc_need_free"]
    v["free"] = cur["memory_free_bytes"]
    v["avail"] = cur["memory_available_bytes"]
    v["waste"] = cur["abd_chunk_waste_size"]


def main():
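    # Print a header every hdr_intr lines, then emit one line of statistics
    # per interval until count (if given) is exhausted.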
    global sint
    global count
    global hdr_intr

    i = 0
    count_flag = 0

    init()
    if count > 0:
        count_flag = 1

    signal(SIGINT, SIG_DFL)
    signal(SIGWINCH, resize_handler)
    while True:
        if i == 0:
            print_header()

        snap_stats()
        calculate()
        print_values()

        if count_flag == 1:
            if count <= 1:
                break
            count -= 1

        i = 0 if i >= hdr_intr else i + 1
        time.sleep(sint)

    if out:
        out.close()


if __name__ == '__main__':
    main()