[lkp] [SUNRPC] e1e5693191: +71.4% fsmark.files_per_sec

From: kernel test robot
Date: Wed Jan 13 2016 - 22:06:04 EST


FYI, we noticed the following changes on

git://internal_merge_and_test_tree revert-e1e56931915cc3fafb4e17d4e1073e24c0a304ce-e1e56931915cc3fafb4e17d4e1073e24c0a304ce
commit e1e56931915cc3fafb4e17d4e1073e24c0a304ce ("SUNRPC: Fixup socket wait for memory")


=========================================================================================
compiler/cpufreq_governor/disk/filesize/fs2/fs/iterations/kconfig/md/nr_threads/rootfs/sync_method/tbox_group/test_size/testcase:
gcc-4.9/performance/3HDD/4M/nfsv4/btrfs/1x/x86_64-rhel/RAID0/64t/debian-x86_64-2015-02-07.cgz/NoSync/ivb44/200G/fsmark

commit:
v4.4-rc8
e1e56931915cc3fafb4e17d4e1073e24c0a304ce

v4.4-rc8 e1e56931915cc3fafb4e17d4e1
---------------- --------------------------
%stddev %change %stddev
\ | \
1.018e+09 ± 2% -37.3% 6.38e+08 ± 1% fsmark.app_overhead
22.40 ± 14% +71.4% 38.40 ± 0% fsmark.files_per_sec
2287 ± 0% -45.0% 1258 ± 0% fsmark.time.elapsed_time
2287 ± 0% -45.0% 1258 ± 0% fsmark.time.elapsed_time.max
10.75 ± 4% +83.7% 19.75 ± 2% fsmark.time.percent_of_cpu_this_job_got
1.2e+10 ± 2% -74.1% 3.105e+09 ± 0% latency_stats.sum.wait_on_page_bit.__filemap_fdatawait_range.filemap_fdatawait_range.filemap_write_and_wait_range.nfs4_file_fsync.[nfsv4].vfs_fsync_range.vfs_fsync.nfs4_file_flush.[nfsv4].filp_close.__close_fd.SyS_close.entry_SYSCALL_64_fastpath
2319 ± 0% -44.4% 1290 ± 0% uptime.boot
97173 ± 0% -42.3% 56052 ± 0% uptime.idle
8108411 ± 1% +21.1% 9821389 ± 0% meminfo.MemFree
31468 ± 0% +46.7% 46177 ± 0% meminfo.NFS_Unstable
21802 ± 2% -39.3% 13242 ± 2% meminfo.Writeback
2287 ± 0% -45.0% 1258 ± 0% time.elapsed_time
2287 ± 0% -45.0% 1258 ± 0% time.elapsed_time.max
10.75 ± 4% +83.7% 19.75 ± 2% time.percent_of_cpu_this_job_got
1266178 ± 0% -21.0% 1000024 ± 0% softirqs.NET_RX
438856 ± 1% -19.5% 353420 ± 2% softirqs.RCU
380524 ± 0% -19.3% 307256 ± 0% softirqs.SCHED
841106 ± 0% -21.0% 664215 ± 1% softirqs.TIMER
94084 ± 0% +81.3% 170560 ± 0% vmstat.io.bo
8104477 ± 1% +21.0% 9810467 ± 0% vmstat.memory.free
6.00 ± 0% -20.8% 4.75 ± 9% vmstat.procs.b
9383 ± 1% +65.2% 15505 ± 0% vmstat.system.cs
1251 ± 1% +53.4% 1919 ± 0% vmstat.system.in
15732936 ± 3% -15.0% 13365333 ± 1% numa-numastat.node0.numa_foreign
5797897 ± 4% +32.6% 7688554 ± 0% numa-numastat.node0.numa_miss
5798012 ± 4% +32.6% 7688606 ± 0% numa-numastat.node0.other_node
5797908 ± 4% +32.6% 7688549 ± 0% numa-numastat.node1.numa_foreign
15732931 ± 3% -15.0% 13365359 ± 1% numa-numastat.node1.numa_miss
15732913 ± 3% -15.0% 13365296 ± 1% numa-numastat.node1.other_node
1.147e+10 ± 0% -22.6% 8.874e+09 ± 0% cpuidle.C1-IVT.time
1.154e+09 ± 1% -35.6% 7.428e+08 ± 2% cpuidle.C1E-IVT.time
5.187e+08 ± 2% -33.2% 3.465e+08 ± 1% cpuidle.C3-IVT.time
153669 ± 0% +21.9% 187252 ± 1% cpuidle.C3-IVT.usage
9.566e+10 ± 1% -48.0% 4.973e+10 ± 0% cpuidle.C6-IVT.time
3124803 ± 0% -16.7% 2604298 ± 0% cpuidle.C6-IVT.usage
1.013e+08 ± 15% -61.7% 38786807 ± 20% cpuidle.POLL.time
10572 ± 22% -66.8% 3507 ± 1% cpuidle.POLL.usage
9640 ± 2% +9.0% 10507 ± 7% numa-meminfo.node0.KernelStack
3575187 ± 2% +31.9% 4716524 ± 0% numa-meminfo.node0.MemFree
14978 ± 2% +47.5% 22087 ± 0% numa-meminfo.node0.NFS_Unstable
10624 ± 1% -35.9% 6810 ± 3% numa-meminfo.node0.Writeback
8474 ± 1% +15.3% 9767 ± 2% numa-meminfo.node1.Dirty
4534267 ± 1% +12.5% 5103016 ± 1% numa-meminfo.node1.MemFree
16667 ± 0% +46.7% 24444 ± 2% numa-meminfo.node1.NFS_Unstable
11319 ± 2% -40.2% 6768 ± 3% numa-meminfo.node1.Writeback
0.96 ± 1% +33.0% 1.28 ± 0% turbostat.%Busy
29.75 ± 1% +30.3% 38.75 ± 1% turbostat.Avg_MHz
23.58 ± 0% +36.4% 32.17 ± 0% turbostat.CPU%c1
0.80 ± 3% +15.5% 0.93 ± 1% turbostat.CPU%c3
74.66 ± 0% -12.1% 65.62 ± 0% turbostat.CPU%c6
58.59 ± 0% +4.0% 60.94 ± 0% turbostat.CorWatt
11.84 ± 4% -46.8% 6.29 ± 5% turbostat.Pkg%pc2
0.08 ± 11% -83.3% 0.01 ± 34% turbostat.Pkg%pc3
3.77 ± 2% +41.7% 5.33 ± 2% turbostat.Pkg%pc6
88.77 ± 0% +2.3% 90.81 ± 0% turbostat.PkgWatt
3.48 ± 0% +12.3% 3.91 ± 0% turbostat.RAMWatt
1511 ± 16% +21.5% 1835 ± 8% proc-vmstat.allocstall
11282 ± 3% +47.5% 16646 ± 2% proc-vmstat.kswapd_low_wmark_hit_quickly
2027101 ± 1% +21.1% 2455329 ± 0% proc-vmstat.nr_free_pages
7862 ± 0% +46.6% 11524 ± 0% proc-vmstat.nr_unstable
5456 ± 2% -39.2% 3319 ± 2% proc-vmstat.nr_writeback
4776414 ± 0% -44.6% 2643782 ± 0% proc-vmstat.pgfault
253751 ± 16% +21.2% 307466 ± 8% proc-vmstat.pgscan_direct_dma32
5618934 ± 16% +21.2% 6810557 ± 8% proc-vmstat.pgscan_direct_normal
252127 ± 16% +21.8% 306978 ± 8% proc-vmstat.pgsteal_direct_dma32
5584293 ± 16% +21.8% 6800262 ± 8% proc-vmstat.pgsteal_direct_normal
867072 ± 0% +50.5% 1304864 ± 0% proc-vmstat.slabs_scanned
21817 ± 0% -21.8% 17066 ± 0% proc-vmstat.workingset_refault
8282 ± 5% +10.1% 9122 ± 2% slabinfo.anon_vma.active_objs
8282 ± 5% +10.1% 9122 ± 2% slabinfo.anon_vma.num_objs
17264 ± 0% -10.4% 15468 ± 1% slabinfo.btrfs_extent_buffer.active_objs
17374 ± 0% -10.5% 15551 ± 1% slabinfo.btrfs_extent_buffer.num_objs
6257 ± 1% -16.3% 5234 ± 0% slabinfo.kmalloc-1024.active_objs
6467 ± 1% -16.7% 5385 ± 0% slabinfo.kmalloc-1024.num_objs
52661 ± 0% +18.8% 62552 ± 0% slabinfo.kmalloc-128.active_objs
880.75 ± 0% +16.7% 1028 ± 0% slabinfo.kmalloc-128.active_slabs
56414 ± 0% +16.7% 65812 ± 0% slabinfo.kmalloc-128.num_objs
880.75 ± 0% +16.7% 1028 ± 0% slabinfo.kmalloc-128.num_slabs
10336 ± 10% +15.9% 11978 ± 2% slabinfo.kmalloc-256.active_objs
24011 ± 1% -15.1% 20393 ± 1% slabinfo.kmalloc-512.active_objs
24466 ± 1% -15.1% 20768 ± 1% slabinfo.kmalloc-512.num_objs
28192 ± 1% +14.7% 32329 ± 2% slabinfo.kmalloc-96.active_objs
684.00 ± 1% +14.0% 779.50 ± 1% slabinfo.kmalloc-96.active_slabs
28758 ± 1% +13.9% 32765 ± 1% slabinfo.kmalloc-96.num_objs
684.00 ± 1% +14.0% 779.50 ± 1% slabinfo.kmalloc-96.num_slabs
893799 ± 2% +31.9% 1179152 ± 0% numa-vmstat.node0.nr_free_pages
602.00 ± 2% +9.0% 656.25 ± 7% numa-vmstat.node0.nr_kernel_stack
3742 ± 2% +48.2% 5545 ± 1% numa-vmstat.node0.nr_unstable
2656 ± 0% -36.8% 1679 ± 3% numa-vmstat.node0.nr_writeback
7234228 ± 5% -22.1% 5636260 ± 3% numa-vmstat.node0.numa_foreign
25126071 ± 0% -10.1% 22591402 ± 0% numa-vmstat.node0.numa_hit
25076317 ± 0% -10.2% 22526610 ± 0% numa-vmstat.node0.numa_local
2332523 ± 4% +31.5% 3066132 ± 1% numa-vmstat.node0.numa_miss
2382277 ± 4% +31.4% 3130924 ± 1% numa-vmstat.node0.numa_other
14530 ± 0% -22.6% 11245 ± 0% numa-vmstat.node0.workingset_refault
2117 ± 1% +15.9% 2453 ± 1% numa-vmstat.node1.nr_dirty
1133575 ± 1% +12.5% 1275767 ± 1% numa-vmstat.node1.nr_free_pages
4163 ± 0% +47.6% 6146 ± 1% numa-vmstat.node1.nr_unstable
2829 ± 2% -40.2% 1691 ± 3% numa-vmstat.node1.nr_writeback
2332530 ± 4% +31.5% 3066140 ± 1% numa-vmstat.node1.numa_foreign
7234236 ± 5% -22.1% 5636265 ± 3% numa-vmstat.node1.numa_miss
7263978 ± 5% -22.2% 5651010 ± 3% numa-vmstat.node1.numa_other
7284 ± 2% -20.0% 5825 ± 1% numa-vmstat.node1.workingset_refault


fsmark.files_per_sec

40 ++---------------------------------------------------------------------+
O O O O O O O O O O O O O O O O O O O O O O O
35 ++ |
30 ++ |
| |
25 ++ * *.. *.. .*..*..*..*...*..*.. * *.. * *..|
|..: : .. .. ..: : ..: : |
20 *+ : : * * * : : *..* : : *
| : : : : : : |
15 ++ : : : : : : |
10 ++ : : : : : : |
| : : : : : : |
5 ++ : : : : : : |
| : : : |
0 ++----*-----------------------------------------*----------------*-----+


fsmark.app_overhead

1.2e+09 ++----------------------------------------------------------------+
| .*.. |
1e+09 *+.* *.. *. *.. .*.. .*..* *.. .*..* *..|
| : : *.. .. *..*. *. : : *. : : *
| : : * : : : : |
8e+08 ++ : : : : : : |
O : O : O : O : O O: : O
6e+08 ++ O: :O O O O O O O O O O: :O O : O :O |
| : : O : : : : |
4e+08 ++ : : : : : : |
| : : : : : : |
| : : : : : : |
2e+08 ++ :: :: :: |
| : : : |
0 ++----*--------------------------------------*--------------*-----+


fsmark.time.percent_of_cpu_this_job_got

25 ++---------------------------------------------------------------------+
| |
O O O O |
20 ++ O O O O O O O O O O O O O O O O O O O
| |
| |
15 ++ |
| |
10 *+.* *..*..*..*...*..*..*..*...*..*..*..* *..*..*..* *..*
| : : : : : : |
| : : : : : : |
5 ++ : : : : : : |
| : : : : : : |
| :: :: : : |
0 ++----*-----------------------------------------*----------------*-----+


fsmark.time.elapsed_time

2500 ++-------------------------------------------------------------------+
*..* *...*..*..*..*..*..*..*...*.. .*..* *..*..*...* *..*
| : : *. : : : : |
2000 ++ : : : : : : |
| : : : : : : |
| : : : : : : |
1500 ++ : : : : : : |
O O: O :O O O O O O O O O O O O: O :O O O O: O :O O
1000 ++ : : : : : : |
| : : : : : : |
| : : : : : : |
500 ++ : : : : : : |
| : : : |
| : : : |
0 ++----*----------------------------------------*---------------*-----+


fsmark.time.elapsed_time.max

2500 ++-------------------------------------------------------------------+
*..* *...*..*..*..*..*..*..*...*.. .*..* *..*..*...* *..*
| : : *. : : : : |
2000 ++ : : : : : : |
| : : : : : : |
| : : : : : : |
1500 ++ : : : : : : |
O O: O :O O O O O O O O O O O O: O :O O O O: O :O O
1000 ++ : : : : : : |
| : : : : : : |
| : : : : : : |
500 ++ : : : : : : |
| : : : |
| : : : |
0 ++----*----------------------------------------*---------------*-----+

[*] bisect-good sample
[O] bisect-bad sample

To reproduce:

git clone git://git.kernel.org/pub/scm/linux/kernel/git/wfg/lkp-tests.git
cd lkp-tests
bin/lkp install job.yaml # the job file is attached to this email
bin/lkp run job.yaml


Disclaimer:
Results have been estimated based on internal Intel analysis and are provided
for informational purposes only. Any difference in system hardware or software
design or configuration may affect actual performance.


Thanks,
Ying Huang
---
LKP_SERVER: inn
LKP_CGI_PORT: 80
LKP_CIFS_PORT: 139
testcase: fsmark
default-monitors:
wait: activate-monitor
kmsg:
uptime:
iostat:
vmstat:
numa-numastat:
numa-vmstat:
numa-meminfo:
proc-vmstat:
proc-stat:
interval: 10
meminfo:
slabinfo:
interrupts:
lock_stat:
latency_stats:
softirqs:
bdi_dev_mapping:
diskstats:
nfsstat:
cpuidle:
cpufreq-stats:
turbostat:
pmeter:
sched_debug:
interval: 60
cpufreq_governor: performance
default-watchdogs:
oom-killer:
watchdog:
commit: e1e56931915cc3fafb4e17d4e1073e24c0a304ce
model: Ivytown Ivy Bridge-EP
nr_cpu: 48
memory: 64G
nr_hdd_partitions: 3
hdd_partitions: "/dev/disk/by-id/ata-WDC_WD1003FBYZ-010FB0_WD-WCAW36*-part1"
swap_partitions: "/dev/disk/by-id/ata-WDC_WD1003FBYZ-010FB0_WD-WCAW36795753-part2"
rootfs_partition: "/dev/disk/by-id/ata-WDC_WD1003FBYZ-010FB0_WD-WCAW36795753-part3"
netconsole_port: 6644
category: benchmark
iterations: 1x
nr_threads: 64t
disk: 3HDD
md: RAID0
fs: btrfs
fs2: nfsv4
fsmark:
filesize: 4M
test_size: 200G
sync_method: NoSync
queue: bisect
testbox: ivb44
tbox_group: ivb44
kconfig: x86_64-rhel
enqueue_time: 2016-01-13 04:20:39.008773586 +08:00
id: 036ebf9afe73b241465ac2a779baca7ba73a394e
user: lkp
compiler: gcc-4.9
head_commit: cdc5595577acfbf0a497e76c6ffff302c974decf
base_commit: 168309855a7d1e16db751e9c647119fe2d2dc878
branch: linux-devel/devel-hourly-2016011007
rootfs: debian-x86_64-2015-02-07.cgz
result_root: "/result/fsmark/performance-1x-64t-3HDD-RAID0-btrfs-nfsv4-4M-200G-NoSync/ivb44/debian-x86_64-2015-02-07.cgz/x86_64-rhel/gcc-4.9/e1e56931915cc3fafb4e17d4e1073e24c0a304ce/0"
job_file: "/lkp/scheduled/ivb44/bisect_fsmark-performance-1x-64t-3HDD-RAID0-btrfs-nfsv4-4M-200G-NoSync-debian-x86_64-2015-02-07.cgz-x86_64-rhel-e1e56931915cc3fafb4e17d4e1073e24c0a304ce-20160113-71652-1fbeiw8-0.yaml"
max_uptime: 3600
initrd: "/osimage/debian/debian-x86_64-2015-02-07.cgz"
bootloader_append:
- root=/dev/ram0
- user=lkp
- job=/lkp/scheduled/ivb44/bisect_fsmark-performance-1x-64t-3HDD-RAID0-btrfs-nfsv4-4M-200G-NoSync-debian-x86_64-2015-02-07.cgz-x86_64-rhel-e1e56931915cc3fafb4e17d4e1073e24c0a304ce-20160113-71652-1fbeiw8-0.yaml
- ARCH=x86_64
- kconfig=x86_64-rhel
- branch=linux-devel/devel-hourly-2016011007
- commit=e1e56931915cc3fafb4e17d4e1073e24c0a304ce
- BOOT_IMAGE=/pkg/linux/x86_64-rhel/gcc-4.9/e1e56931915cc3fafb4e17d4e1073e24c0a304ce/vmlinuz-4.4.0-rc8-00001-ge1e5693
- max_uptime=3600
- RESULT_ROOT=/result/fsmark/performance-1x-64t-3HDD-RAID0-btrfs-nfsv4-4M-200G-NoSync/ivb44/debian-x86_64-2015-02-07.cgz/x86_64-rhel/gcc-4.9/e1e56931915cc3fafb4e17d4e1073e24c0a304ce/0
- LKP_SERVER=inn
- |2-


earlyprintk=ttyS0,115200 systemd.log_level=err
debug apic=debug sysrq_always_enabled rcupdate.rcu_cpu_stall_timeout=100
panic=-1 softlockup_panic=1 nmi_watchdog=panic oops=panic load_ramdisk=2 prompt_ramdisk=0
console=ttyS0,115200 console=tty0 vga=normal

rw
lkp_initrd: "/lkp/lkp/lkp-x86_64.cgz"
modules_initrd: "/pkg/linux/x86_64-rhel/gcc-4.9/e1e56931915cc3fafb4e17d4e1073e24c0a304ce/modules.cgz"
bm_initrd: "/osimage/deps/debian-x86_64-2015-02-07.cgz/lkp.cgz,/osimage/deps/debian-x86_64-2015-02-07.cgz/run-ipconfig.cgz,/osimage/deps/debian-x86_64-2015-02-07.cgz/turbostat.cgz,/lkp/benchmarks/turbostat.cgz,/osimage/deps/debian-x86_64-2015-02-07.cgz/fs.cgz,/osimage/deps/debian-x86_64-2015-02-07.cgz/fs2.cgz,/lkp/benchmarks/fsmark.cgz"
linux_headers_initrd: "/pkg/linux/x86_64-rhel/gcc-4.9/e1e56931915cc3fafb4e17d4e1073e24c0a304ce/linux-headers.cgz"
repeat_to: 2
kernel: "/pkg/linux/x86_64-rhel/gcc-4.9/e1e56931915cc3fafb4e17d4e1073e24c0a304ce/vmlinuz-4.4.0-rc8-00001-ge1e5693"
dequeue_time: 2016-01-13 04:30:07.418730935 +08:00
job_state: finished
loadavg: 67.41 69.92 53.01 2/599 33044
start_time: '1452630676'
end_time: '1452631940'
version: "/lkp/lkp/.src-20160112-223131"

Attachment: reproduce.sh
Description: Bourne shell script