[LKP] [sched] 2db34be8238: -7.0% aim7.jobs-per-min

From: Huang Ying
Date: Fri May 08 2015 - 03:13:53 EST


FYI, we noticed the below changes on

git://bee.sh.intel.com/git/ydu19/linux for-lkp
commit 2db34be8238b94521a8985f66d2520e0492dcfba ("sched: Rewrite per entity runnable load average tracking")


testcase/path_params/tbox_group: aim7/performance-2000-fork_test/brickland3

72cc303b8e634656 2db34be8238b94521a8985f66d
---------------- --------------------------
         %stddev     %change         %stddev
             \          |                \
24953 ± 1% -7.0% 23199 ± 1% aim7.jobs-per-min
41872 ± 38% +757.8% 359187 ± 13% aim7.time.involuntary_context_switches
228 ± 5% -11.0% 203 ± 3% aim7.time.user_time
90816951 ± 3% -6.9% 84529093 ± 5% aim7.time.voluntary_context_switches
481 ± 1% +7.6% 517 ± 1% aim7.time.elapsed_time
481 ± 1% +7.6% 517 ± 1% aim7.time.elapsed_time.max
751 ± 2% -7.8% 693 ± 1% pmeter.Average_Active_Power
25 ± 6% +237.3% 86 ± 7% vmstat.procs.r
388606 ± 4% -19.6% 312449 ± 6% vmstat.system.cs
41872 ± 38% +757.8% 359187 ± 13% time.involuntary_context_switches
228 ± 5% -11.0% 203 ± 3% time.user_time
275414 ± 11% -41.7% 160498 ± 14% softirqs.HRTIMER
5138484 ± 4% -14.7% 4385384 ± 2% softirqs.RCU
345303 ± 1% -9.9% 311201 ± 0% meminfo.Active
249510 ± 2% -13.4% 215994 ± 0% meminfo.Active(anon)
210600 ± 2% -16.4% 176076 ± 0% meminfo.AnonPages
248769 ± 1% -14.2% 213525 ± 2% meminfo.KernelStack
160700 ± 4% -14.3% 137788 ± 6% meminfo.PageTables
1.537e+08 ± 1% +37.9% 2.119e+08 ± 2% numa-numastat.node0.local_node
1.537e+08 ± 1% +37.9% 2.119e+08 ± 2% numa-numastat.node0.numa_hit
1 ± 24% +400.0% 8 ± 45% numa-numastat.node1.other_node
2 ± 36% +277.8% 8 ± 21% numa-numastat.node2.other_node
1.594e+09 ± 8% +124.3% 3.575e+09 ± 4% cpuidle.C1-IVT-4S.time
2954018 ± 15% +482.6% 17209296 ± 9% cpuidle.C1-IVT-4S.usage
1.34e+09 ± 11% -30.0% 9.384e+08 ± 6% cpuidle.C1E-IVT-4S.time
4.316e+09 ± 6% -53.6% 2.003e+09 ± 6% cpuidle.C3-IVT-4S.time
8127887 ± 11% -23.6% 6208689 ± 3% cpuidle.C3-IVT-4S.usage
48976675 ± 1% -34.8% 31946658 ± 2% cpuidle.C6-IVT-4S.usage
177 ± 6% -55.6% 78 ± 15% cpuidle.POLL.usage
52.89 ± 2% -24.1% 40.14 ± 1% turbostat.CPU%c1
4.65 ± 2% -41.9% 2.70 ± 10% turbostat.CPU%c3
18.38 ± 14% +88.6% 34.67 ± 2% turbostat.CPU%c6
283 ± 3% -13.6% 244 ± 1% turbostat.CorWatt
0.78 ± 9% +2400.0% 19.44 ± 1% turbostat.Pkg%pc2
354 ± 2% -11.2% 315 ± 1% turbostat.PkgWatt
93.83 ± 0% -7.5% 86.84 ± 0% turbostat.RAMWatt
62386 ± 2% -13.4% 54027 ± 0% proc-vmstat.nr_active_anon
52669 ± 2% -16.4% 44028 ± 0% proc-vmstat.nr_anon_pages
202 ± 24% -54.6% 92 ± 21% proc-vmstat.nr_dirtied
15558 ± 1% -14.2% 13356 ± 1% proc-vmstat.nr_kernel_stack
40156 ± 4% -14.2% 34441 ± 6% proc-vmstat.nr_page_table_pages
220 ± 15% -37.8% 136 ± 16% proc-vmstat.nr_written
6411013 ± 1% +42.5% 9133531 ± 2% proc-vmstat.pgalloc_dma32
71679 ± 2% +15.8% 83023 ± 2% numa-meminfo.node0.SUnreclaim
683976 ± 1% +20.6% 824678 ± 2% numa-meminfo.node0.MemUsed
92533 ± 1% +6.3% 98335 ± 4% numa-meminfo.node0.Inactive
803390 ± 10% -25.0% 602675 ± 9% numa-meminfo.node1.MemUsed
70282 ± 25% -42.4% 40515 ± 24% numa-meminfo.node1.KernelStack
8654 ± 44% -82.0% 1555 ± 41% numa-meminfo.node3.Shmem
8023 ± 44% -87.6% 995 ± 35% numa-meminfo.node3.Inactive(anon)
39389 ± 7% +98.6% 78242 ± 2% slabinfo.kmalloc-128.active_objs
39403 ± 7% +99.5% 78610 ± 3% slabinfo.kmalloc-128.num_objs
615 ± 7% +99.6% 1227 ± 3% slabinfo.kmalloc-128.active_slabs
615 ± 7% +99.6% 1227 ± 3% slabinfo.kmalloc-128.num_slabs
193739 ± 0% +10.6% 214199 ± 0% slabinfo.kmalloc-64.num_objs
190791 ± 0% +11.1% 212045 ± 0% slabinfo.kmalloc-64.active_objs
3026 ± 0% +10.6% 3346 ± 0% slabinfo.kmalloc-64.num_slabs
3026 ± 0% +10.6% 3346 ± 0% slabinfo.kmalloc-64.active_slabs
8593 ± 0% +10.5% 9491 ± 0% slabinfo.mm_struct.num_objs
8454 ± 0% +10.1% 9308 ± 0% slabinfo.mm_struct.active_objs
75192762 ± 1% +36.5% 1.026e+08 ± 3% numa-vmstat.node0.numa_hit
17922 ± 2% +15.8% 20759 ± 2% numa-vmstat.node0.nr_slab_unreclaimable
75161178 ± 1% +36.5% 1.026e+08 ± 3% numa-vmstat.node0.numa_local
4394 ± 25% -42.2% 2540 ± 25% numa-vmstat.node1.nr_kernel_stack
52426 ± 0% +59.3% 83495 ± 0% numa-vmstat.node1.numa_other
83295 ± 0% -36.7% 52715 ± 0% numa-vmstat.node2.numa_other
2005 ± 44% -87.6% 248 ± 35% numa-vmstat.node3.nr_inactive_anon
2163 ± 44% -82.0% 388 ± 41% numa-vmstat.node3.nr_shmem
1257390 Â 2% -99.8% 2133 Â 4% sched_debug.cfs_rq[0]:/.tg_load_avg
2115 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[0]:/.blocked_load_avg
28699924 Â 5% -65.4% 9932690 Â 4% sched_debug.cfs_rq[0]:/.min_vruntime
51 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[0]:/.utilization_load_avg
64335 Â 5% +39.5% 89744 Â 3% sched_debug.cfs_rq[0]:/.exec_clock
14 Â 37% -76.8% 3 Â 33% sched_debug.cfs_rq[0]:/.nr_spread_over
2132 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[0]:/.tg_load_contrib
1297304 Â 2% -99.8% 1986 Â 2% sched_debug.cfs_rq[100]:/.tg_load_avg
99 Â 23% -100.0% 0 Â 0% sched_debug.cfs_rq[100]:/.utilization_load_avg
13597 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[100]:/.tg_load_contrib
16843099 Â 8% -79.6% 3442369 Â 23% sched_debug.cfs_rq[100]:/.min_vruntime
13538 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[100]:/.blocked_load_avg
13109 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[101]:/.blocked_load_avg
122 Â 25% -100.0% 0 Â 0% sched_debug.cfs_rq[101]:/.utilization_load_avg
13193 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[101]:/.tg_load_contrib
16907143 Â 7% -79.7% 3428806 Â 23% sched_debug.cfs_rq[101]:/.min_vruntime
1297648 Â 2% -99.8% 1986 Â 2% sched_debug.cfs_rq[101]:/.tg_load_avg
17037690 Â 8% -79.8% 3434272 Â 23% sched_debug.cfs_rq[102]:/.min_vruntime
14156 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[102]:/.blocked_load_avg
121 Â 44% -100.0% 0 Â 0% sched_debug.cfs_rq[102]:/.utilization_load_avg
14250 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[102]:/.tg_load_contrib
1297593 Â 2% -99.8% 1989 Â 3% sched_debug.cfs_rq[102]:/.tg_load_avg
14010 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[103]:/.blocked_load_avg
14077 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[103]:/.tg_load_contrib
84 Â 17% -100.0% 0 Â 0% sched_debug.cfs_rq[103]:/.utilization_load_avg
1297902 Â 2% -99.8% 1986 Â 3% sched_debug.cfs_rq[103]:/.tg_load_avg
17308420 Â 8% -80.2% 3425797 Â 23% sched_debug.cfs_rq[103]:/.min_vruntime
14535 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[104]:/.blocked_load_avg
1297991 Â 2% -99.8% 1985 Â 3% sched_debug.cfs_rq[104]:/.tg_load_avg
17620551 Â 8% -80.6% 3410019 Â 23% sched_debug.cfs_rq[104]:/.min_vruntime
133 Â 20% -100.0% 0 Â 0% sched_debug.cfs_rq[104]:/.utilization_load_avg
14604 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[104]:/.tg_load_contrib
1297262 Â 2% -99.8% 1981 Â 3% sched_debug.cfs_rq[105]:/.tg_load_avg
1 Â 33% +700.0% 12 Â 5% sched_debug.cfs_rq[105]:/.runnable_load_avg
15157 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[105]:/.tg_load_contrib
115 Â 27% -100.0% 0 Â 0% sched_debug.cfs_rq[105]:/.utilization_load_avg
15063 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[105]:/.blocked_load_avg
18220360 Â 2% -85.8% 2588613 Â 32% sched_debug.cfs_rq[105]:/.min_vruntime
17733781 Â 3% -85.4% 2583251 Â 32% sched_debug.cfs_rq[106]:/.min_vruntime
14034 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[106]:/.tg_load_contrib
149 Â 46% -100.0% 0 Â 0% sched_debug.cfs_rq[106]:/.utilization_load_avg
1297519 Â 2% -99.8% 1980 Â 3% sched_debug.cfs_rq[106]:/.tg_load_avg
13919 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[106]:/.blocked_load_avg
1297805 Â 2% -99.8% 1975 Â 3% sched_debug.cfs_rq[107]:/.tg_load_avg
163 Â 34% -100.0% 0 Â 0% sched_debug.cfs_rq[107]:/.utilization_load_avg
13335 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[107]:/.tg_load_contrib
13224 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[107]:/.blocked_load_avg
17429448 Â 3% -85.0% 2615721 Â 32% sched_debug.cfs_rq[107]:/.min_vruntime
13087 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[108]:/.blocked_load_avg
1298273 Â 2% -99.8% 1973 Â 3% sched_debug.cfs_rq[108]:/.tg_load_avg
17139485 Â 3% -84.9% 2593242 Â 32% sched_debug.cfs_rq[108]:/.min_vruntime
13181 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[108]:/.tg_load_contrib
126 Â 26% -100.0% 0 Â 0% sched_debug.cfs_rq[108]:/.utilization_load_avg
13576 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[109]:/.tg_load_contrib
17013902 Â 2% -84.7% 2602111 Â 32% sched_debug.cfs_rq[109]:/.min_vruntime
13494 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[109]:/.blocked_load_avg
1296843 Â 2% -99.8% 1970 Â 3% sched_debug.cfs_rq[109]:/.tg_load_avg
134 Â 46% -100.0% 0 Â 0% sched_debug.cfs_rq[109]:/.utilization_load_avg
53 Â 42% -100.0% 0 Â 0% sched_debug.cfs_rq[10]:/.utilization_load_avg
63989 Â 4% +36.6% 87389 Â 2% sched_debug.cfs_rq[10]:/.exec_clock
30520609 Â 4% -67.4% 9936406 Â 4% sched_debug.cfs_rq[10]:/.min_vruntime
6828 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[10]:/.tg_load_contrib
1263785 Â 2% -99.8% 2110 Â 5% sched_debug.cfs_rq[10]:/.tg_load_avg
6726 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[10]:/.blocked_load_avg
12571 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[110]:/.tg_load_contrib
89 Â 37% -100.0% 0 Â 0% sched_debug.cfs_rq[110]:/.utilization_load_avg
12509 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[110]:/.blocked_load_avg
16981688 Â 2% -84.6% 2622647 Â 33% sched_debug.cfs_rq[110]:/.min_vruntime
1298970 Â 2% -99.8% 1972 Â 3% sched_debug.cfs_rq[110]:/.tg_load_avg
13166 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[111]:/.tg_load_contrib
16855401 Â 2% -84.5% 2607113 Â 33% sched_debug.cfs_rq[111]:/.min_vruntime
13072 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[111]:/.blocked_load_avg
1298512 Â 2% -99.8% 1974 Â 3% sched_debug.cfs_rq[111]:/.tg_load_avg
100 Â 49% -100.0% 0 Â 0% sched_debug.cfs_rq[111]:/.utilization_load_avg
110 Â 20% -100.0% 0 Â 0% sched_debug.cfs_rq[112]:/.utilization_load_avg
12700 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[112]:/.blocked_load_avg
16786802 Â 2% -84.4% 2620052 Â 33% sched_debug.cfs_rq[112]:/.min_vruntime
12786 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[112]:/.tg_load_contrib
1299098 Â 2% -99.8% 1974 Â 3% sched_debug.cfs_rq[112]:/.tg_load_avg
12233 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[113]:/.tg_load_contrib
12173 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[113]:/.blocked_load_avg
101 Â 24% -100.0% 0 Â 0% sched_debug.cfs_rq[113]:/.utilization_load_avg
16747527 Â 2% -84.3% 2621005 Â 32% sched_debug.cfs_rq[113]:/.min_vruntime
1298913 Â 2% -99.8% 1971 Â 3% sched_debug.cfs_rq[113]:/.tg_load_avg
16710632 Â 2% -84.5% 2597564 Â 33% sched_debug.cfs_rq[114]:/.min_vruntime
13379 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[114]:/.tg_load_contrib
13324 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[114]:/.blocked_load_avg
1298550 Â 2% -99.8% 1965 Â 3% sched_debug.cfs_rq[114]:/.tg_load_avg
1298650 Â 2% -99.8% 1966 Â 3% sched_debug.cfs_rq[115]:/.tg_load_avg
16857312 Â 2% -84.6% 2589869 Â 32% sched_debug.cfs_rq[115]:/.min_vruntime
12773 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[115]:/.tg_load_contrib
12701 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[115]:/.blocked_load_avg
168 Â 36% -100.0% 0 Â 0% sched_debug.cfs_rq[116]:/.utilization_load_avg
12513 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[116]:/.tg_load_contrib
16942146 Â 2% -84.6% 2612577 Â 32% sched_debug.cfs_rq[116]:/.min_vruntime
12422 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[116]:/.blocked_load_avg
1296968 Â 2% -99.8% 1965 Â 3% sched_debug.cfs_rq[116]:/.tg_load_avg
13522 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[117]:/.tg_load_contrib
13453 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[117]:/.blocked_load_avg
17062474 Â 2% -84.8% 2588672 Â 33% sched_debug.cfs_rq[117]:/.min_vruntime
127 Â 35% -100.0% 0 Â 0% sched_debug.cfs_rq[117]:/.utilization_load_avg
1296448 Â 2% -99.8% 1964 Â 3% sched_debug.cfs_rq[117]:/.tg_load_avg
12634 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[118]:/.blocked_load_avg
17268018 Â 2% -85.1% 2579101 Â 32% sched_debug.cfs_rq[118]:/.min_vruntime
95 Â 49% -100.0% 0 Â 0% sched_debug.cfs_rq[118]:/.utilization_load_avg
1296574 Â 2% -99.8% 1964 Â 3% sched_debug.cfs_rq[118]:/.tg_load_avg
12681 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[118]:/.tg_load_contrib
17625296 Â 2% -85.4% 2574849 Â 32% sched_debug.cfs_rq[119]:/.min_vruntime
12767 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[119]:/.blocked_load_avg
127 Â 30% -100.0% 0 Â 0% sched_debug.cfs_rq[119]:/.utilization_load_avg
12836 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[119]:/.tg_load_contrib
1296763 Â 2% -99.8% 1966 Â 3% sched_debug.cfs_rq[119]:/.tg_load_avg
1 Â 0% +1250.0% 13 Â 8% sched_debug.cfs_rq[11]:/.runnable_load_avg
1264432 Â 2% -99.8% 2108 Â 5% sched_debug.cfs_rq[11]:/.tg_load_avg
53 Â 34% -100.0% 0 Â 0% sched_debug.cfs_rq[11]:/.utilization_load_avg
30345143 Â 4% -67.3% 9918910 Â 5% sched_debug.cfs_rq[11]:/.min_vruntime
63568 Â 4% +37.4% 87364 Â 3% sched_debug.cfs_rq[11]:/.exec_clock
7120 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[11]:/.tg_load_contrib
7029 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[11]:/.blocked_load_avg
63197 Â 4% +38.2% 87315 Â 3% sched_debug.cfs_rq[12]:/.exec_clock
48 Â 19% -100.0% 0 Â 0% sched_debug.cfs_rq[12]:/.utilization_load_avg
6997 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[12]:/.blocked_load_avg
30157769 Â 4% -67.1% 9923363 Â 5% sched_debug.cfs_rq[12]:/.min_vruntime
1263051 Â 2% -99.8% 2106 Â 5% sched_debug.cfs_rq[12]:/.tg_load_avg
7084 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[12]:/.tg_load_contrib
1717292 Â 22% -64.2% 615535 Â 25% sched_debug.cfs_rq[13]:/.max_vruntime
1717292 Â 22% -64.2% 615535 Â 25% sched_debug.cfs_rq[13]:/.MIN_vruntime
7336 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[13]:/.tg_load_contrib
7243 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[13]:/.blocked_load_avg
30048314 Â 4% -67.0% 9913403 Â 4% sched_debug.cfs_rq[13]:/.min_vruntime
1264142 Â 2% -99.8% 2100 Â 5% sched_debug.cfs_rq[13]:/.tg_load_avg
63053 Â 4% +38.9% 87610 Â 3% sched_debug.cfs_rq[13]:/.exec_clock
6776 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[14]:/.tg_load_contrib
62893 Â 4% +39.2% 87531 Â 3% sched_debug.cfs_rq[14]:/.exec_clock
60 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[14]:/.utilization_load_avg
29921296 Â 4% -66.9% 9894201 Â 4% sched_debug.cfs_rq[14]:/.min_vruntime
1262418 Â 2% -99.8% 2096 Â 5% sched_debug.cfs_rq[14]:/.tg_load_avg
6678 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[14]:/.blocked_load_avg
1264287 Â 2% -99.8% 2092 Â 5% sched_debug.cfs_rq[15]:/.tg_load_avg
8277 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[15]:/.tg_load_contrib
8162 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[15]:/.blocked_load_avg
91 Â 31% -100.0% 0 Â 0% sched_debug.cfs_rq[16]:/.utilization_load_avg
1264502 Â 2% -99.8% 2090 Â 5% sched_debug.cfs_rq[16]:/.tg_load_avg
7647 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[16]:/.blocked_load_avg
7737 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[16]:/.tg_load_contrib
1 Â 33% +1066.7% 17 Â 14% sched_debug.cfs_rq[17]:/.runnable_load_avg
7953 Â 17% -100.0% 0 Â 0% sched_debug.cfs_rq[17]:/.blocked_load_avg
8045 Â 17% -100.0% 0 Â 0% sched_debug.cfs_rq[17]:/.tg_load_contrib
1266771 Â 2% -99.8% 2087 Â 5% sched_debug.cfs_rq[17]:/.tg_load_avg
8027 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[18]:/.blocked_load_avg
69 Â 25% -100.0% 0 Â 0% sched_debug.cfs_rq[18]:/.utilization_load_avg
8149 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[18]:/.tg_load_contrib
1266363 Â 2% -99.8% 2081 Â 5% sched_debug.cfs_rq[18]:/.tg_load_avg
7689 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[19]:/.blocked_load_avg
1268542 Â 2% -99.8% 2076 Â 5% sched_debug.cfs_rq[19]:/.tg_load_avg
7821 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[19]:/.tg_load_contrib
65966 Â 4% +32.2% 87214 Â 3% sched_debug.cfs_rq[1]:/.exec_clock
31421923 Â 4% -68.6% 9853570 Â 5% sched_debug.cfs_rq[1]:/.min_vruntime
8521 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[1]:/.blocked_load_avg
58 Â 40% -100.0% 0 Â 0% sched_debug.cfs_rq[1]:/.utilization_load_avg
8629 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[1]:/.tg_load_contrib
1 Â 34% +940.0% 13 Â 7% sched_debug.cfs_rq[1]:/.runnable_load_avg
1259904 Â 2% -99.8% 2129 Â 5% sched_debug.cfs_rq[1]:/.tg_load_avg
7903 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[20]:/.tg_load_contrib
7788 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[20]:/.blocked_load_avg
1 Â 0% +1650.0% 17 Â 18% sched_debug.cfs_rq[20]:/.runnable_load_avg
1268618 Â 2% -99.8% 2075 Â 5% sched_debug.cfs_rq[20]:/.tg_load_avg
70 Â 33% -100.0% 0 Â 0% sched_debug.cfs_rq[20]:/.utilization_load_avg
7483 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[21]:/.blocked_load_avg
1267874 Â 2% -99.8% 2072 Â 5% sched_debug.cfs_rq[21]:/.tg_load_avg
7603 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[21]:/.tg_load_contrib
7412 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[22]:/.blocked_load_avg
1269455 Â 2% -99.8% 2067 Â 5% sched_debug.cfs_rq[22]:/.tg_load_avg
7486 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[22]:/.tg_load_contrib
7603 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[23]:/.tg_load_contrib
1268827 Â 2% -99.8% 2062 Â 5% sched_debug.cfs_rq[23]:/.tg_load_avg
7506 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[23]:/.blocked_load_avg
38 Â 47% -100.0% 0 Â 0% sched_debug.cfs_rq[23]:/.utilization_load_avg
7408 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[24]:/.blocked_load_avg
1 Â 34% +1420.0% 19 Â 18% sched_debug.cfs_rq[24]:/.runnable_load_avg
1272125 Â 2% -99.8% 2060 Â 5% sched_debug.cfs_rq[24]:/.tg_load_avg
7527 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[24]:/.tg_load_contrib
8034 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[25]:/.blocked_load_avg
8175 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[25]:/.tg_load_contrib
32 Â 30% -100.0% 0 Â 0% sched_debug.cfs_rq[25]:/.utilization_load_avg
1 Â 0% +1725.0% 18 Â 18% sched_debug.cfs_rq[25]:/.runnable_load_avg
1273513 Â 2% -99.8% 2059 Â 5% sched_debug.cfs_rq[25]:/.tg_load_avg
1271660 Â 2% -99.8% 2055 Â 5% sched_debug.cfs_rq[26]:/.tg_load_avg
7386 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[26]:/.blocked_load_avg
7496 Â 17% -100.0% 0 Â 0% sched_debug.cfs_rq[26]:/.tg_load_contrib
8025 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[27]:/.tg_load_contrib
1270142 Â 2% -99.8% 2048 Â 5% sched_debug.cfs_rq[27]:/.tg_load_avg
7899 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[27]:/.blocked_load_avg
7912 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[28]:/.blocked_load_avg
96 Â 41% -100.0% 0 Â 0% sched_debug.cfs_rq[28]:/.utilization_load_avg
8021 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[28]:/.tg_load_contrib
1272513 Â 2% -99.8% 2047 Â 5% sched_debug.cfs_rq[28]:/.tg_load_avg
7453 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[29]:/.blocked_load_avg
7567 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[29]:/.tg_load_contrib
1273454 Â 1% -99.8% 2045 Â 5% sched_debug.cfs_rq[29]:/.tg_load_avg
68132 Â 3% +29.9% 88514 Â 4% sched_debug.cfs_rq[2]:/.exec_clock
32561605 Â 3% -69.6% 9886080 Â 4% sched_debug.cfs_rq[2]:/.min_vruntime
7200 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[2]:/.blocked_load_avg
1255791 Â 2% -99.8% 2129 Â 5% sched_debug.cfs_rq[2]:/.tg_load_avg
7297 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[2]:/.tg_load_contrib
1 Â 0% +1325.0% 14 Â 12% sched_debug.cfs_rq[30]:/.runnable_load_avg
1274303 Â 1% -99.8% 2043 Â 5% sched_debug.cfs_rq[30]:/.tg_load_avg
60 Â 44% -100.0% 0 Â 0% sched_debug.cfs_rq[30]:/.utilization_load_avg
8222 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[30]:/.blocked_load_avg
8350 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[30]:/.tg_load_contrib
32370033 Â 6% -74.5% 8241237 Â 15% sched_debug.cfs_rq[30]:/.min_vruntime
1276736 Â 1% -99.8% 2041 Â 5% sched_debug.cfs_rq[31]:/.tg_load_avg
7796 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[31]:/.tg_load_contrib
32444271 Â 6% -74.9% 8133972 Â 14% sched_debug.cfs_rq[31]:/.min_vruntime
7675 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[31]:/.blocked_load_avg
49 Â 22% -100.0% 0 Â 0% sched_debug.cfs_rq[31]:/.utilization_load_avg
32025935 Â 7% -74.5% 8152920 Â 15% sched_debug.cfs_rq[32]:/.min_vruntime
7761 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[32]:/.tg_load_contrib
7629 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[32]:/.blocked_load_avg
1276111 Â 1% -99.8% 2034 Â 5% sched_debug.cfs_rq[32]:/.tg_load_avg
1276774 Â 1% -99.8% 2031 Â 5% sched_debug.cfs_rq[33]:/.tg_load_avg
31628083 Â 7% -74.2% 8144756 Â 15% sched_debug.cfs_rq[33]:/.min_vruntime
7696 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[33]:/.blocked_load_avg
7826 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[33]:/.tg_load_contrib
31244748 Â 7% -73.9% 8143865 Â 15% sched_debug.cfs_rq[34]:/.min_vruntime
7539 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[34]:/.tg_load_contrib
45 Â 34% -100.0% 0 Â 0% sched_debug.cfs_rq[34]:/.utilization_load_avg
7436 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[34]:/.blocked_load_avg
1277729 Â 1% -99.8% 2028 Â 5% sched_debug.cfs_rq[34]:/.tg_load_avg
1 Â 0% +1400.0% 15 Â 12% sched_debug.cfs_rq[34]:/.runnable_load_avg
59 Â 30% -100.0% 0 Â 0% sched_debug.cfs_rq[35]:/.utilization_load_avg
6769 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[35]:/.blocked_load_avg
6867 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[35]:/.tg_load_contrib
30835233 Â 8% -73.3% 8237780 Â 15% sched_debug.cfs_rq[35]:/.min_vruntime
1276250 Â 1% -99.8% 2023 Â 5% sched_debug.cfs_rq[35]:/.tg_load_avg
1276527 Â 1% -99.8% 2019 Â 5% sched_debug.cfs_rq[36]:/.tg_load_avg
7790 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[36]:/.blocked_load_avg
91 Â 22% -100.0% 0 Â 0% sched_debug.cfs_rq[36]:/.utilization_load_avg
31206999 Â 6% -73.7% 8216155 Â 15% sched_debug.cfs_rq[36]:/.min_vruntime
7898 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[36]:/.tg_load_contrib
1276979 Â 1% -99.8% 2018 Â 5% sched_debug.cfs_rq[37]:/.tg_load_avg
7495 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[37]:/.tg_load_contrib
31284022 Â 6% -73.7% 8225683 Â 15% sched_debug.cfs_rq[37]:/.min_vruntime
7376 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[37]:/.blocked_load_avg
1 Â 0% +1350.0% 14 Â 14% sched_debug.cfs_rq[38]:/.runnable_load_avg
7167 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[38]:/.blocked_load_avg
60 Â 28% -100.0% 0 Â 0% sched_debug.cfs_rq[38]:/.utilization_load_avg
31287625 Â 5% -73.9% 8159876 Â 15% sched_debug.cfs_rq[38]:/.min_vruntime
7274 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[38]:/.tg_load_contrib
1275869 Â 1% -99.8% 2017 Â 5% sched_debug.cfs_rq[38]:/.tg_load_avg
7581 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[39]:/.blocked_load_avg
1036555 Â 32% -68.3% 328720 Â 48% sched_debug.cfs_rq[39]:/.MIN_vruntime
1036555 Â 32% -68.3% 328720 Â 48% sched_debug.cfs_rq[39]:/.max_vruntime
63 Â 33% -100.0% 0 Â 0% sched_debug.cfs_rq[39]:/.utilization_load_avg
7709 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[39]:/.tg_load_contrib
1277209 Â 1% -99.8% 2012 Â 6% sched_debug.cfs_rq[39]:/.tg_load_avg
31179610 Â 5% -73.7% 8186405 Â 15% sched_debug.cfs_rq[39]:/.min_vruntime
67498 Â 4% +34.3% 90623 Â 2% sched_debug.cfs_rq[3]:/.exec_clock
7223 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[3]:/.tg_load_contrib
32380079 Â 3% -69.4% 9900194 Â 5% sched_debug.cfs_rq[3]:/.min_vruntime
7110 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[3]:/.blocked_load_avg
1256951 Â 2% -99.8% 2126 Â 5% sched_debug.cfs_rq[3]:/.tg_load_avg
1278998 Â 1% -99.8% 2008 Â 6% sched_debug.cfs_rq[40]:/.tg_load_avg
7455 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[40]:/.blocked_load_avg
39 Â 43% -100.0% 0 Â 0% sched_debug.cfs_rq[40]:/.utilization_load_avg
7588 Â 17% -100.0% 0 Â 0% sched_debug.cfs_rq[40]:/.tg_load_contrib
31181443 Â 5% -73.8% 8166086 Â 15% sched_debug.cfs_rq[40]:/.min_vruntime
8071 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[41]:/.tg_load_contrib
48 Â 48% -100.0% 0 Â 0% sched_debug.cfs_rq[41]:/.utilization_load_avg
31219849 Â 6% -74.0% 8128936 Â 15% sched_debug.cfs_rq[41]:/.min_vruntime
7959 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[41]:/.blocked_load_avg
1279508 Â 1% -99.8% 2006 Â 6% sched_debug.cfs_rq[41]:/.tg_load_avg
0 Â 0% +Inf% 2 Â 44% sched_debug.cfs_rq[41]:/.load
1278063 Â 1% -99.8% 2003 Â 6% sched_debug.cfs_rq[42]:/.tg_load_avg
31280500 Â 6% -74.0% 8145420 Â 15% sched_debug.cfs_rq[42]:/.min_vruntime
7558 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[42]:/.tg_load_contrib
7474 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[42]:/.blocked_load_avg
1 Â 0% +1375.0% 14 Â 10% sched_debug.cfs_rq[43]:/.runnable_load_avg
1279939 Â 1% -99.8% 2004 Â 6% sched_debug.cfs_rq[43]:/.tg_load_avg
38 Â 46% -100.0% 0 Â 0% sched_debug.cfs_rq[43]:/.utilization_load_avg
7496 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[43]:/.blocked_load_avg
31380609 Â 6% -74.0% 8147480 Â 15% sched_debug.cfs_rq[43]:/.min_vruntime
7601 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[43]:/.tg_load_contrib
1279763 Â 1% -99.8% 2000 Â 6% sched_debug.cfs_rq[44]:/.tg_load_avg
31657973 Â 6% -74.3% 8141050 Â 15% sched_debug.cfs_rq[44]:/.min_vruntime
8355 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[44]:/.tg_load_contrib
8250 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[44]:/.blocked_load_avg
8589 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[45]:/.tg_load_contrib
68 Â 45% -100.0% 0 Â 0% sched_debug.cfs_rq[45]:/.utilization_load_avg
1279921 Â 1% -99.8% 1999 Â 6% sched_debug.cfs_rq[45]:/.tg_load_avg
32303098 Â 2% -81.2% 6060450 Â 27% sched_debug.cfs_rq[45]:/.min_vruntime
8480 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[45]:/.blocked_load_avg
32574894 Â 2% -81.5% 6025932 Â 28% sched_debug.cfs_rq[46]:/.min_vruntime
1282275 Â 1% -99.8% 2001 Â 6% sched_debug.cfs_rq[46]:/.tg_load_avg
7869 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[46]:/.blocked_load_avg
7995 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[46]:/.tg_load_contrib
1 Â 0% +1375.0% 14 Â 5% sched_debug.cfs_rq[46]:/.runnable_load_avg
32356280 Â 2% -81.5% 5995175 Â 29% sched_debug.cfs_rq[47]:/.min_vruntime
1281150 Â 2% -99.8% 2005 Â 6% sched_debug.cfs_rq[47]:/.tg_load_avg
7930 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[47]:/.blocked_load_avg
76 Â 49% -100.0% 0 Â 0% sched_debug.cfs_rq[47]:/.utilization_load_avg
8081 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[47]:/.tg_load_contrib
50 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[48]:/.utilization_load_avg
7808 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[48]:/.blocked_load_avg
7911 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[48]:/.tg_load_contrib
1280329 Â 1% -99.8% 2002 Â 6% sched_debug.cfs_rq[48]:/.tg_load_avg
1 Â 0% +1450.0% 15 Â 7% sched_debug.cfs_rq[48]:/.runnable_load_avg
32077870 Â 2% -81.2% 6026082 Â 27% sched_debug.cfs_rq[48]:/.min_vruntime
8085 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[49]:/.tg_load_contrib
31747791 Â 2% -81.1% 6009817 Â 28% sched_debug.cfs_rq[49]:/.min_vruntime
1281102 Â 1% -99.8% 1997 Â 6% sched_debug.cfs_rq[49]:/.tg_load_avg
7957 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[49]:/.blocked_load_avg
67069 Â 4% +30.4% 87454 Â 3% sched_debug.cfs_rq[4]:/.exec_clock
31984181 Â 3% -68.9% 9948076 Â 5% sched_debug.cfs_rq[4]:/.min_vruntime
1258251 Â 2% -99.8% 2123 Â 5% sched_debug.cfs_rq[4]:/.tg_load_avg
7152 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[4]:/.blocked_load_avg
1 Â 34% +960.0% 13 Â 6% sched_debug.cfs_rq[4]:/.runnable_load_avg
7264 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[4]:/.tg_load_contrib
48 Â 34% -100.0% 0 Â 0% sched_debug.cfs_rq[4]:/.utilization_load_avg
1282542 Â 2% -99.8% 1993 Â 6% sched_debug.cfs_rq[50]:/.tg_load_avg
7716 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[50]:/.tg_load_contrib
7592 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[50]:/.blocked_load_avg
31620130 Â 2% -80.7% 6090204 Â 28% sched_debug.cfs_rq[50]:/.min_vruntime
7663 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[51]:/.blocked_load_avg
31495201 Â 2% -80.8% 6058202 Â 28% sched_debug.cfs_rq[51]:/.min_vruntime
80 Â 26% -100.0% 0 Â 0% sched_debug.cfs_rq[51]:/.utilization_load_avg
7768 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[51]:/.tg_load_contrib
1282028 Â 1% -99.8% 1987 Â 6% sched_debug.cfs_rq[51]:/.tg_load_avg
31275600 Â 2% -80.7% 6034367 Â 28% sched_debug.cfs_rq[52]:/.min_vruntime
7925 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[52]:/.blocked_load_avg
1284572 Â 1% -99.8% 1990 Â 6% sched_debug.cfs_rq[52]:/.tg_load_avg
8036 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[52]:/.tg_load_contrib
31176732 Â 2% -80.6% 6054590 Â 28% sched_debug.cfs_rq[53]:/.min_vruntime
1284665 Â 2% -99.8% 1988 Â 6% sched_debug.cfs_rq[53]:/.tg_load_avg
7571 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[53]:/.tg_load_contrib
7469 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[53]:/.blocked_load_avg
31105150 Â 2% -80.7% 5993395 Â 28% sched_debug.cfs_rq[54]:/.min_vruntime
7313 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[54]:/.blocked_load_avg
1285119 Â 2% -99.8% 1984 Â 6% sched_debug.cfs_rq[54]:/.tg_load_avg
7424 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[54]:/.tg_load_contrib
1286085 Â 2% -99.8% 1980 Â 6% sched_debug.cfs_rq[55]:/.tg_load_avg
8183 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[55]:/.tg_load_contrib
8079 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[55]:/.blocked_load_avg
31123445 Â 2% -80.8% 5975863 Â 28% sched_debug.cfs_rq[55]:/.min_vruntime
7561 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[56]:/.tg_load_contrib
1285969 Â 2% -99.8% 1980 Â 6% sched_debug.cfs_rq[56]:/.tg_load_avg
31208163 Â 2% -80.9% 5968001 Â 28% sched_debug.cfs_rq[56]:/.min_vruntime
45 Â 46% -100.0% 0 Â 0% sched_debug.cfs_rq[56]:/.utilization_load_avg
7460 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[56]:/.blocked_load_avg
1 Â 34% +1140.0% 15 Â 9% sched_debug.cfs_rq[56]:/.runnable_load_avg
76 Â 38% -100.0% 0 Â 0% sched_debug.cfs_rq[57]:/.utilization_load_avg
7834 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[57]:/.blocked_load_avg
31282504 Â 2% -80.9% 5971017 Â 27% sched_debug.cfs_rq[57]:/.min_vruntime
7932 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[57]:/.tg_load_contrib
1287860 Â 2% -99.8% 1982 Â 6% sched_debug.cfs_rq[57]:/.tg_load_avg
1 Â 47% +814.3% 16 Â 11% sched_debug.cfs_rq[57]:/.runnable_load_avg
8087 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[58]:/.blocked_load_avg
8213 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[58]:/.tg_load_contrib
31460262 Â 2% -80.9% 5999599 Â 28% sched_debug.cfs_rq[58]:/.min_vruntime
74 Â 40% -100.0% 0 Â 0% sched_debug.cfs_rq[58]:/.utilization_load_avg
1286949 Â 2% -99.8% 1985 Â 6% sched_debug.cfs_rq[58]:/.tg_load_avg
1288900 Â 2% -99.8% 1985 Â 6% sched_debug.cfs_rq[59]:/.tg_load_avg
82 Â 45% -100.0% 0 Â 0% sched_debug.cfs_rq[59]:/.utilization_load_avg
31665848 Â 2% -81.1% 5982858 Â 28% sched_debug.cfs_rq[59]:/.min_vruntime
1 Â 34% +1180.0% 16 Â 11% sched_debug.cfs_rq[59]:/.runnable_load_avg
8296 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[59]:/.blocked_load_avg
8421 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[59]:/.tg_load_contrib
66338 Â 3% +32.1% 87605 Â 3% sched_debug.cfs_rq[5]:/.exec_clock
1259776 Â 2% -99.8% 2121 Â 5% sched_debug.cfs_rq[5]:/.tg_load_avg
2903821 Â 15% -96.9% 89014 Â 43% sched_debug.cfs_rq[5]:/.spread0
7252 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[5]:/.tg_load_contrib
59 Â 31% -100.0% 0 Â 0% sched_debug.cfs_rq[5]:/.utilization_load_avg
31604243 Â 4% -68.3% 10021931 Â 5% sched_debug.cfs_rq[5]:/.min_vruntime
7138 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[5]:/.blocked_load_avg
1 Â 0% +1250.0% 13 Â 3% sched_debug.cfs_rq[5]:/.runnable_load_avg
17261898 Â 6% -75.8% 4174611 Â 3% sched_debug.cfs_rq[60]:/.min_vruntime
155 Â 23% -100.0% 0 Â 0% sched_debug.cfs_rq[60]:/.utilization_load_avg
14071 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[60]:/.blocked_load_avg
1291042 Â 2% -99.8% 1987 Â 6% sched_debug.cfs_rq[60]:/.tg_load_avg
37257 Â 7% +19.8% 44627 Â 7% sched_debug.cfs_rq[60]:/.exec_clock
14143 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[60]:/.tg_load_contrib
18970013 Â 5% -78.4% 4104573 Â 3% sched_debug.cfs_rq[61]:/.min_vruntime
173 Â 32% -100.0% 0 Â 0% sched_debug.cfs_rq[61]:/.utilization_load_avg
16260 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[61]:/.tg_load_contrib
40852 Â 6% +11.5% 45550 Â 6% sched_debug.cfs_rq[61]:/.exec_clock
16158 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[61]:/.blocked_load_avg
1290339 Â 2% -99.8% 1990 Â 6% sched_debug.cfs_rq[61]:/.tg_load_avg
38545 Â 6% +17.7% 45356 Â 7% sched_debug.cfs_rq[62]:/.exec_clock
15379 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[62]:/.tg_load_contrib
15302 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[62]:/.blocked_load_avg
17851602 Â 5% -76.9% 4122318 Â 3% sched_debug.cfs_rq[62]:/.min_vruntime
1289834 Â 2% -99.8% 1992 Â 6% sched_debug.cfs_rq[62]:/.tg_load_avg
37345 Â 5% +21.0% 45199 Â 8% sched_debug.cfs_rq[63]:/.exec_clock
17288069 Â 5% -76.3% 4100823 Â 2% sched_debug.cfs_rq[63]:/.min_vruntime
111 Â 36% -100.0% 0 Â 0% sched_debug.cfs_rq[63]:/.utilization_load_avg
14615 Â 2% -100.0% 0 Â 0% sched_debug.cfs_rq[63]:/.tg_load_contrib
14512 Â 2% -100.0% 0 Â 0% sched_debug.cfs_rq[63]:/.blocked_load_avg
1288713 Â 1% -99.8% 1988 Â 6% sched_debug.cfs_rq[63]:/.tg_load_avg
15023 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[64]:/.blocked_load_avg
36714 Â 5% +23.2% 45215 Â 7% sched_debug.cfs_rq[64]:/.exec_clock
16943908 Â 5% -75.8% 4102781 Â 4% sched_debug.cfs_rq[64]:/.min_vruntime
15120 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[64]:/.tg_load_contrib
1289203 Â 2% -99.8% 1983 Â 6% sched_debug.cfs_rq[64]:/.tg_load_avg
1288770 Â 1% -99.8% 1987 Â 5% sched_debug.cfs_rq[65]:/.tg_load_avg
14368 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[65]:/.tg_load_contrib
36363 Â 5% +24.6% 45315 Â 7% sched_debug.cfs_rq[65]:/.exec_clock
16805184 Â 5% -75.4% 4127563 Â 3% sched_debug.cfs_rq[65]:/.min_vruntime
14276 Â 6% -100.0% 0 Â 0% sched_debug.cfs_rq[65]:/.blocked_load_avg
120 Â 46% -100.0% 0 Â 0% sched_debug.cfs_rq[65]:/.utilization_load_avg
35814 Â 5% +26.5% 45320 Â 7% sched_debug.cfs_rq[66]:/.exec_clock
16586490 Â 5% -75.2% 4110024 Â 4% sched_debug.cfs_rq[66]:/.min_vruntime
14003 Â 1% -100.0% 0 Â 0% sched_debug.cfs_rq[66]:/.blocked_load_avg
110 Â 20% -100.0% 0 Â 0% sched_debug.cfs_rq[66]:/.utilization_load_avg
14100 Â 1% -100.0% 0 Â 0% sched_debug.cfs_rq[66]:/.tg_load_contrib
1289222 Â 2% -99.8% 1987 Â 5% sched_debug.cfs_rq[66]:/.tg_load_avg
14693 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[67]:/.tg_load_contrib
121 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[67]:/.utilization_load_avg
14593 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[67]:/.blocked_load_avg
36596 Â 7% +22.7% 44914 Â 7% sched_debug.cfs_rq[67]:/.exec_clock
1288991 Â 2% -99.8% 1990 Â 6% sched_debug.cfs_rq[67]:/.tg_load_avg
16933869 Â 7% -75.7% 4122680 Â 4% sched_debug.cfs_rq[67]:/.min_vruntime
35949 Â 6% +25.3% 45027 Â 7% sched_debug.cfs_rq[68]:/.exec_clock
133 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[68]:/.utilization_load_avg
13595 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[68]:/.tg_load_contrib
1291134 Â 1% -99.8% 1984 Â 5% sched_debug.cfs_rq[68]:/.tg_load_avg
16638068 Â 6% -75.4% 4099752 Â 5% sched_debug.cfs_rq[68]:/.min_vruntime
13504 Â 3% -100.0% 0 Â 0% sched_debug.cfs_rq[68]:/.blocked_load_avg
13378 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[69]:/.tg_load_contrib
1287493 Â 1% -99.8% 1987 Â 5% sched_debug.cfs_rq[69]:/.tg_load_avg
13324 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[69]:/.blocked_load_avg
0 ± 0% +Inf% 17 ± 48% sched_debug.cfs_rq[69]:/.load
16378018 ± 5% -74.9% 4104247 ± 4% sched_debug.cfs_rq[69]:/.min_vruntime
35482 ± 6% +26.9% 45034 ± 7% sched_debug.cfs_rq[69]:/.exec_clock
89 ± 34% -100.0% 0 ± 0% sched_debug.cfs_rq[69]:/.utilization_load_avg
65237 Â 4% +34.7% 87901 Â 3% sched_debug.cfs_rq[6]:/.exec_clock
6945 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[6]:/.blocked_load_avg
1260608 Â 2% -99.8% 2120 Â 5% sched_debug.cfs_rq[6]:/.tg_load_avg
0 Â 0% +Inf% 3 Â 0% sched_debug.cfs_rq[6]:/.load
7046 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[6]:/.tg_load_contrib
31032393 Â 4% -67.8% 9984141 Â 4% sched_debug.cfs_rq[6]:/.min_vruntime
16185513 Â 6% -74.7% 4088386 Â 4% sched_debug.cfs_rq[70]:/.min_vruntime
1289167 Â 1% -99.8% 1988 Â 5% sched_debug.cfs_rq[70]:/.tg_load_avg
14068 Â 1% -100.0% 0 Â 0% sched_debug.cfs_rq[70]:/.tg_load_contrib
74 Â 31% -100.0% 0 Â 0% sched_debug.cfs_rq[70]:/.utilization_load_avg
34993 Â 6% +28.5% 44959 Â 7% sched_debug.cfs_rq[70]:/.exec_clock
13985 Â 1% -100.0% 0 Â 0% sched_debug.cfs_rq[70]:/.blocked_load_avg
13431 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[71]:/.tg_load_contrib
34807 Â 6% +29.8% 45176 Â 6% sched_debug.cfs_rq[71]:/.exec_clock
95 Â 39% -100.0% 0 Â 0% sched_debug.cfs_rq[71]:/.utilization_load_avg
1288733 Â 1% -99.8% 1993 Â 5% sched_debug.cfs_rq[71]:/.tg_load_avg
16070569 Â 6% -74.4% 4110886 Â 5% sched_debug.cfs_rq[71]:/.min_vruntime
13367 Â 4% -100.0% 0 Â 0% sched_debug.cfs_rq[71]:/.blocked_load_avg
90 Â 34% -100.0% 0 Â 0% sched_debug.cfs_rq[72]:/.utilization_load_avg
34683 Â 7% +31.7% 45667 Â 7% sched_debug.cfs_rq[72]:/.exec_clock
13512 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[72]:/.blocked_load_avg
1286529 Â 1% -99.8% 1993 Â 5% sched_debug.cfs_rq[72]:/.tg_load_avg
15993091 Â 6% -74.4% 4093989 Â 4% sched_debug.cfs_rq[72]:/.min_vruntime
13576 Â 5% -100.0% 0 Â 0% sched_debug.cfs_rq[72]:/.tg_load_contrib
12533 Â 2% -100.0% 0 Â 0% sched_debug.cfs_rq[73]:/.tg_load_contrib
15963083 Â 6% -74.3% 4100627 Â 4% sched_debug.cfs_rq[73]:/.min_vruntime
34556 Â 7% +30.6% 45126 Â 7% sched_debug.cfs_rq[73]:/.exec_clock
12446 Â 2% -100.0% 0 Â 0% sched_debug.cfs_rq[73]:/.blocked_load_avg
1286434 Â 2% -99.8% 1995 Â 5% sched_debug.cfs_rq[73]:/.tg_load_avg
12633 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[74]:/.blocked_load_avg
15968393 Â 6% -74.4% 4080116 Â 3% sched_debug.cfs_rq[74]:/.min_vruntime
1287548 Â 1% -99.8% 1999 Â 5% sched_debug.cfs_rq[74]:/.tg_load_avg
34553 Â 7% +30.1% 44965 Â 7% sched_debug.cfs_rq[74]:/.exec_clock
12692 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[74]:/.tg_load_contrib
80 Â 21% -100.0% 0 Â 0% sched_debug.cfs_rq[74]:/.utilization_load_avg
15616 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[75]:/.tg_load_contrib
15525 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[75]:/.blocked_load_avg
1288073 Â 1% -99.8% 2003 Â 5% sched_debug.cfs_rq[75]:/.tg_load_avg
147 Â 20% -100.0% 0 Â 0% sched_debug.cfs_rq[75]:/.utilization_load_avg
110 Â 8% -100.0% 0 Â 0% sched_debug.cfs_rq[76]:/.utilization_load_avg
1287479 Â 1% -99.8% 1994 Â 5% sched_debug.cfs_rq[76]:/.tg_load_avg
15195 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[76]:/.tg_load_contrib
1 Â 33% +833.3% 14 Â 15% sched_debug.cfs_rq[76]:/.runnable_load_avg
15107 Â 13% -100.0% 0 Â 0% sched_debug.cfs_rq[76]:/.blocked_load_avg
1287305 Â 1% -99.8% 1998 Â 5% sched_debug.cfs_rq[77]:/.tg_load_avg
137 Â 42% -100.0% 0 Â 0% sched_debug.cfs_rq[77]:/.utilization_load_avg
15832 Â 20% -100.0% 0 Â 0% sched_debug.cfs_rq[77]:/.blocked_load_avg
15911 Â 20% -100.0% 0 Â 0% sched_debug.cfs_rq[77]:/.tg_load_contrib
1286971 Â 1% -99.8% 2001 Â 5% sched_debug.cfs_rq[78]:/.tg_load_avg
14248 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[78]:/.blocked_load_avg
14352 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[78]:/.tg_load_contrib
111 Â 34% -100.0% 0 Â 0% sched_debug.cfs_rq[78]:/.utilization_load_avg
14411 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[79]:/.tg_load_contrib
1288909 Â 1% -99.8% 2001 Â 5% sched_debug.cfs_rq[79]:/.tg_load_avg
75 Â 30% -100.0% 0 Â 0% sched_debug.cfs_rq[79]:/.utilization_load_avg
14338 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[79]:/.blocked_load_avg
7923 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[7]:/.tg_load_contrib
65375 Â 4% +34.1% 87657 Â 2% sched_debug.cfs_rq[7]:/.exec_clock
31157107 Â 4% -67.8% 10027453 Â 4% sched_debug.cfs_rq[7]:/.min_vruntime
1259259 Â 2% -99.8% 2118 Â 5% sched_debug.cfs_rq[7]:/.tg_load_avg
90 Â 25% -100.0% 0 Â 0% sched_debug.cfs_rq[7]:/.utilization_load_avg
7827 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[7]:/.blocked_load_avg
1288387 Â 1% -99.8% 2003 Â 5% sched_debug.cfs_rq[80]:/.tg_load_avg
14455 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[80]:/.blocked_load_avg
14541 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[80]:/.tg_load_contrib
122 Â 35% -100.0% 0 Â 0% sched_debug.cfs_rq[80]:/.utilization_load_avg
1288733 Â 1% -99.8% 1994 Â 4% sched_debug.cfs_rq[81]:/.tg_load_avg
14921 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[81]:/.tg_load_contrib
131 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[81]:/.utilization_load_avg
14835 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[81]:/.blocked_load_avg
13468 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[82]:/.tg_load_contrib
116 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[82]:/.utilization_load_avg
1 Â 34% +1020.0% 14 Â 13% sched_debug.cfs_rq[82]:/.runnable_load_avg
13402 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[82]:/.blocked_load_avg
1289545 Â 2% -99.8% 1997 Â 4% sched_debug.cfs_rq[82]:/.tg_load_avg
1290465 Â 2% -99.8% 1990 Â 3% sched_debug.cfs_rq[83]:/.tg_load_avg
13372 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[83]:/.blocked_load_avg
13427 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[83]:/.tg_load_contrib
13994 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[84]:/.tg_load_contrib
1288701 Â 2% -99.8% 1994 Â 3% sched_debug.cfs_rq[84]:/.tg_load_avg
13914 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[84]:/.blocked_load_avg
1288295 Â 2% -99.8% 1997 Â 3% sched_debug.cfs_rq[85]:/.tg_load_avg
136 Â 25% -100.0% 0 Â 0% sched_debug.cfs_rq[85]:/.utilization_load_avg
14285 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[85]:/.tg_load_contrib
14203 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[85]:/.blocked_load_avg
14493 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[86]:/.blocked_load_avg
1290351 Â 2% -99.8% 1999 Â 3% sched_debug.cfs_rq[86]:/.tg_load_avg
14582 Â 16% -100.0% 0 Â 0% sched_debug.cfs_rq[86]:/.tg_load_contrib
131 Â 25% -100.0% 0 Â 0% sched_debug.cfs_rq[86]:/.utilization_load_avg
1291995 Â 2% -99.8% 1995 Â 3% sched_debug.cfs_rq[87]:/.tg_load_avg
157 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[87]:/.utilization_load_avg
13568 Â 19% -100.0% 0 Â 0% sched_debug.cfs_rq[87]:/.blocked_load_avg
13634 Â 19% -100.0% 0 Â 0% sched_debug.cfs_rq[87]:/.tg_load_contrib
13771 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[88]:/.blocked_load_avg
1294612 Â 2% -99.8% 1999 Â 3% sched_debug.cfs_rq[88]:/.tg_load_avg
106 Â 37% -100.0% 0 Â 0% sched_debug.cfs_rq[88]:/.utilization_load_avg
13848 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[88]:/.tg_load_contrib
14099 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[89]:/.blocked_load_avg
14179 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[89]:/.tg_load_contrib
1293496 Â 2% -99.8% 2000 Â 3% sched_debug.cfs_rq[89]:/.tg_load_avg
148 Â 30% -100.0% 0 Â 0% sched_debug.cfs_rq[89]:/.utilization_load_avg
53 Â 17% -100.0% 0 Â 0% sched_debug.cfs_rq[8]:/.utilization_load_avg
7008 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[8]:/.tg_load_contrib
31125220 Â 4% -67.8% 10016250 Â 4% sched_debug.cfs_rq[8]:/.min_vruntime
6909 Â 9% -100.0% 0 Â 0% sched_debug.cfs_rq[8]:/.blocked_load_avg
65076 Â 4% +34.3% 87412 Â 3% sched_debug.cfs_rq[8]:/.exec_clock
1260552 Â 2% -99.8% 2116 Â 5% sched_debug.cfs_rq[8]:/.tg_load_avg
15070 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[90]:/.tg_load_contrib
14993 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[90]:/.blocked_load_avg
160 Â 25% -100.0% 0 Â 0% sched_debug.cfs_rq[90]:/.utilization_load_avg
18098859 Â 8% -80.9% 3458486 Â 22% sched_debug.cfs_rq[90]:/.min_vruntime
1293608 Â 2% -99.8% 2001 Â 3% sched_debug.cfs_rq[90]:/.tg_load_avg
1293253 Â 2% -99.8% 2000 Â 3% sched_debug.cfs_rq[91]:/.tg_load_avg
13313 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[91]:/.blocked_load_avg
181 Â 31% -100.0% 0 Â 0% sched_debug.cfs_rq[91]:/.utilization_load_avg
1 Â 24% +600.0% 12 Â 12% sched_debug.cfs_rq[91]:/.runnable_load_avg
13398 Â 15% -100.0% 0 Â 0% sched_debug.cfs_rq[91]:/.tg_load_contrib
17441462 Â 8% -80.3% 3430296 Â 22% sched_debug.cfs_rq[91]:/.min_vruntime
106 Â 23% -100.0% 0 Â 0% sched_debug.cfs_rq[92]:/.utilization_load_avg
13076 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[92]:/.blocked_load_avg
1291860 Â 2% -99.8% 1999 Â 3% sched_debug.cfs_rq[92]:/.tg_load_avg
13169 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[92]:/.tg_load_contrib
17098211 Â 8% -79.7% 3463238 Â 23% sched_debug.cfs_rq[92]:/.min_vruntime
16849249 Â 9% -79.6% 3442554 Â 23% sched_debug.cfs_rq[93]:/.min_vruntime
1292421 Â 2% -99.8% 1997 Â 3% sched_debug.cfs_rq[93]:/.tg_load_avg
13756 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[93]:/.tg_load_contrib
153 Â 38% -100.0% 0 Â 0% sched_debug.cfs_rq[93]:/.utilization_load_avg
13669 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[93]:/.blocked_load_avg
16554257 Â 9% -79.2% 3435110 Â 23% sched_debug.cfs_rq[94]:/.min_vruntime
13283 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[94]:/.tg_load_contrib
1293431 Â 2% -99.8% 1996 Â 3% sched_debug.cfs_rq[94]:/.tg_load_avg
13226 Â 18% -100.0% 0 Â 0% sched_debug.cfs_rq[94]:/.blocked_load_avg
13186 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[95]:/.tg_load_contrib
121 Â 31% -100.0% 0 Â 0% sched_debug.cfs_rq[95]:/.utilization_load_avg
1294318 Â 2% -99.8% 1997 Â 3% sched_debug.cfs_rq[95]:/.tg_load_avg
13125 Â 12% -100.0% 0 Â 0% sched_debug.cfs_rq[95]:/.blocked_load_avg
16653435 Â 8% -79.2% 3471376 Â 22% sched_debug.cfs_rq[95]:/.min_vruntime
17113324 Â 7% -79.6% 3484867 Â 23% sched_debug.cfs_rq[96]:/.min_vruntime
137 Â 23% -100.0% 0 Â 0% sched_debug.cfs_rq[96]:/.utilization_load_avg
1293535 Â 2% -99.8% 1996 Â 3% sched_debug.cfs_rq[96]:/.tg_load_avg
14067 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[96]:/.tg_load_contrib
13983 Â 11% -100.0% 0 Â 0% sched_debug.cfs_rq[96]:/.blocked_load_avg
1294425 Â 2% -99.8% 1993 Â 3% sched_debug.cfs_rq[97]:/.tg_load_avg
13713 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[97]:/.tg_load_contrib
13603 Â 14% -100.0% 0 Â 0% sched_debug.cfs_rq[97]:/.blocked_load_avg
16807656 Â 7% -79.3% 3476910 Â 23% sched_debug.cfs_rq[97]:/.min_vruntime
121 Â 29% -100.0% 0 Â 0% sched_debug.cfs_rq[97]:/.utilization_load_avg
13394 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[98]:/.blocked_load_avg
116 Â 37% -100.0% 0 Â 0% sched_debug.cfs_rq[98]:/.utilization_load_avg
1293736 Â 2% -99.8% 1985 Â 2% sched_debug.cfs_rq[98]:/.tg_load_avg
13487 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[98]:/.tg_load_contrib
16790802 Â 7% -79.5% 3450066 Â 22% sched_debug.cfs_rq[98]:/.min_vruntime
16781556 Â 7% -79.5% 3432159 Â 23% sched_debug.cfs_rq[99]:/.min_vruntime
13144 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[99]:/.tg_load_contrib
110 Â 29% -100.0% 0 Â 0% sched_debug.cfs_rq[99]:/.utilization_load_avg
13085 Â 10% -100.0% 0 Â 0% sched_debug.cfs_rq[99]:/.blocked_load_avg
1295505 Â 2% -99.8% 1980 Â 2% sched_debug.cfs_rq[99]:/.tg_load_avg
30828048 Â 4% -67.7% 9946390 Â 5% sched_debug.cfs_rq[9]:/.min_vruntime
64554 Â 4% +35.2% 87301 Â 2% sched_debug.cfs_rq[9]:/.exec_clock
7642 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[9]:/.tg_load_contrib
0 Â 0% +Inf% 17 Â 47% sched_debug.cfs_rq[9]:/.load
1262693 Â 2% -99.8% 2113 Â 5% sched_debug.cfs_rq[9]:/.tg_load_avg
1 Â 24% +657.1% 13 Â 13% sched_debug.cfs_rq[9]:/.runnable_load_avg
7534 Â 7% -100.0% 0 Â 0% sched_debug.cfs_rq[9]:/.blocked_load_avg
2 ± 30% -54.5% 1 ± 34% sched_debug.cpu#0.cpu_load[4]
1067783 ± 3% +30.4% 1391905 ± 4% sched_debug.cpu#0.sched_count
1030847 ± 3% +29.9% 1338797 ± 4% sched_debug.cpu#0.nr_switches
500455 ± 3% +23.9% 619920 ± 5% sched_debug.cpu#0.sched_goidle
396349 ± 5% +44.4% 572387 ± 11% sched_debug.cpu#0.ttwu_count
8659 ± 7% +50.4% 13023 ± 18% sched_debug.cpu#0.ttwu_local
195437 ± 1% +11.3% 217539 ± 2% sched_debug.cpu#0.nr_load_updates
1102888 Â 2% +22.0% 1345056 Â 5% sched_debug.cpu#1.sched_count
421072 Â 4% +33.0% 560086 Â 11% sched_debug.cpu#1.ttwu_count
1080649 Â 3% +21.2% 1309912 Â 5% sched_debug.cpu#1.nr_switches
529946 Â 3% +14.4% 606226 Â 5% sched_debug.cpu#1.sched_goidle
5373 Â 9% +96.4% 10552 Â 23% sched_debug.cpu#1.ttwu_local
403029 Â 5% +39.0% 560131 Â 9% sched_debug.cpu#10.ttwu_count
1056794 Â 3% +24.0% 1310867 Â 4% sched_debug.cpu#10.nr_switches
4532 Â 13% +117.0% 9835 Â 27% sched_debug.cpu#10.ttwu_local
1077929 Â 3% +24.9% 1346113 Â 4% sched_debug.cpu#10.sched_count
517885 Â 3% +17.1% 606556 Â 4% sched_debug.cpu#10.sched_goidle
365298 Â 5% +131.0% 843923 Â 5% sched_debug.cpu#100.avg_idle
456436 Â 4% -34.6% 298496 Â 20% sched_debug.cpu#100.sched_count
159012 Â 1% -43.7% 89468 Â 12% sched_debug.cpu#100.nr_load_updates
10489 Â 14% +365.2% 48797 Â 39% sched_debug.cpu#100.nr_uninterruptible
1906 Â 10% +223.8% 6173 Â 25% sched_debug.cpu#100.ttwu_local
218937 Â 4% -60.1% 87357 Â 16% sched_debug.cpu#100.sched_goidle
449700 Â 4% -35.0% 292116 Â 20% sched_debug.cpu#100.nr_switches
14022 Â 13% -41.9% 8153 Â 49% sched_debug.cpu#101.curr->pid
1900 Â 11% +226.6% 6206 Â 24% sched_debug.cpu#101.ttwu_local
362789 Â 8% +136.1% 856706 Â 3% sched_debug.cpu#101.avg_idle
453763 Â 4% -35.8% 291315 Â 20% sched_debug.cpu#101.nr_switches
159260 Â 1% -44.0% 89177 Â 12% sched_debug.cpu#101.nr_load_updates
8 Â 40% -84.4% 1 Â 34% sched_debug.cpu#101.cpu_load[3]
220953 Â 4% -60.4% 87391 Â 17% sched_debug.cpu#101.sched_goidle
18 Â 38% -86.7% 2 Â 20% sched_debug.cpu#101.cpu_load[1]
11 Â 37% -84.8% 1 Â 24% sched_debug.cpu#101.cpu_load[2]
10581 Â 15% +354.5% 48092 Â 37% sched_debug.cpu#101.nr_uninterruptible
33 Â 32% -90.3% 3 Â 13% sched_debug.cpu#101.cpu_load[0]
460554 Â 4% -35.5% 297013 Â 20% sched_debug.cpu#101.sched_count
381257 Â 10% +126.7% 864487 Â 2% sched_debug.cpu#102.avg_idle
457120 Â 5% -36.7% 289202 Â 20% sched_debug.cpu#102.nr_switches
464020 Â 5% -36.4% 294946 Â 20% sched_debug.cpu#102.sched_count
159614 Â 1% -44.4% 88729 Â 12% sched_debug.cpu#102.nr_load_updates
222593 Â 4% -61.2% 86471 Â 16% sched_debug.cpu#102.sched_goidle
7 Â 24% -83.9% 1 Â 34% sched_debug.cpu#102.cpu_load[3]
1896 Â 12% +226.1% 6185 Â 26% sched_debug.cpu#102.ttwu_local
8 Â 39% -76.5% 2 Â 0% sched_debug.cpu#102.cpu_load[2]
10714 Â 14% +348.7% 48080 Â 38% sched_debug.cpu#102.nr_uninterruptible
160961 Â 1% -44.7% 88963 Â 12% sched_debug.cpu#103.nr_load_updates
18 Â 22% -86.7% 2 Â 34% sched_debug.cpu#103.cpu_load[1]
29 Â 30% -88.8% 3 Â 33% sched_debug.cpu#103.cpu_load[0]
350413 Â 4% +147.0% 865509 Â 3% sched_debug.cpu#103.avg_idle
474428 Â 4% -37.3% 297374 Â 20% sched_debug.cpu#103.sched_count
11193 Â 15% +337.1% 48929 Â 37% sched_debug.cpu#103.nr_uninterruptible
1963 Â 10% +216.7% 6217 Â 25% sched_debug.cpu#103.ttwu_local
467221 Â 4% -37.7% 290912 Â 20% sched_debug.cpu#103.nr_switches
9 Â 38% -86.5% 1 Â 34% sched_debug.cpu#103.cpu_load[3]
227615 Â 4% -61.9% 86663 Â 16% sched_debug.cpu#103.sched_goidle
13 Â 29% -84.9% 2 Â 35% sched_debug.cpu#103.cpu_load[2]
231531 Â 4% -62.6% 86600 Â 16% sched_debug.cpu#104.sched_goidle
357971 Â 12% +136.2% 845542 Â 5% sched_debug.cpu#104.avg_idle
1974 Â 13% +210.5% 6130 Â 25% sched_debug.cpu#104.ttwu_local
475014 Â 4% -38.6% 291706 Â 20% sched_debug.cpu#104.nr_switches
161873 Â 1% -45.0% 89005 Â 12% sched_debug.cpu#104.nr_load_updates
11493 Â 14% +328.7% 49274 Â 37% sched_debug.cpu#104.nr_uninterruptible
482004 Â 4% -38.3% 297512 Â 20% sched_debug.cpu#104.sched_count
376857 Â 1% +135.5% 887605 Â 3% sched_debug.cpu#105.avg_idle
479657 Â 5% -52.5% 228014 Â 27% sched_debug.cpu#105.nr_switches
233823 Â 5% -68.9% 72804 Â 22% sched_debug.cpu#105.sched_goidle
486675 Â 5% -52.2% 232670 Â 27% sched_debug.cpu#105.sched_count
11520 Â 21% +181.5% 32428 Â 45% sched_debug.cpu#105.nr_uninterruptible
7 Â 45% -82.8% 1 Â 34% sched_debug.cpu#105.cpu_load[3]
252967 Â 3% -43.0% 144291 Â 34% sched_debug.cpu#105.ttwu_count
164221 Â 0% -54.7% 74406 Â 18% sched_debug.cpu#105.nr_load_updates
16589 Â 42% -55.4% 7396 Â 35% sched_debug.cpu#106.curr->pid
468655 Â 6% -51.1% 229029 Â 26% sched_debug.cpu#106.nr_switches
162419 Â 0% -53.8% 75000 Â 16% sched_debug.cpu#106.nr_load_updates
475888 Â 6% -50.8% 233905 Â 26% sched_debug.cpu#106.sched_count
7 Â 45% -79.3% 1 Â 33% sched_debug.cpu#106.cpu_load[2]
246487 Â 3% -43.0% 140405 Â 36% sched_debug.cpu#106.ttwu_count
11120 Â 19% +192.1% 32481 Â 43% sched_debug.cpu#106.nr_uninterruptible
228269 Â 6% -67.9% 73274 Â 21% sched_debug.cpu#106.sched_goidle
406908 Â 7% +112.6% 865001 Â 7% sched_debug.cpu#106.avg_idle
14380 Â 31% -54.4% 6560 Â 46% sched_debug.cpu#107.curr->pid
370251 Â 8% +130.8% 854462 Â 5% sched_debug.cpu#107.avg_idle
161470 Â 1% -53.2% 75636 Â 16% sched_debug.cpu#107.nr_load_updates
460610 Â 5% -49.8% 231031 Â 26% sched_debug.cpu#107.nr_switches
18 Â 47% -86.7% 2 Â 20% sched_debug.cpu#107.cpu_load[1]
10829 Â 18% +202.1% 32719 Â 44% sched_debug.cpu#107.nr_uninterruptible
224331 Â 5% -67.0% 74073 Â 21% sched_debug.cpu#107.sched_goidle
8 Â 29% -88.6% 1 Â 0% sched_debug.cpu#107.cpu_load[4]
14 Â 34% -87.7% 1 Â 24% sched_debug.cpu#107.cpu_load[2]
242403 Â 3% -41.5% 141720 Â 36% sched_debug.cpu#107.ttwu_count
11 Â 32% -91.1% 1 Â 0% sched_debug.cpu#107.cpu_load[3]
468411 Â 6% -49.7% 235417 Â 26% sched_debug.cpu#107.sched_count
8 Â 21% -84.8% 1 Â 34% sched_debug.cpu#108.cpu_load[3]
380484 Â 3% +126.1% 860361 Â 4% sched_debug.cpu#108.avg_idle
1938 Â 11% +144.4% 4736 Â 34% sched_debug.cpu#108.ttwu_local
220306 Â 5% -66.9% 72929 Â 21% sched_debug.cpu#108.sched_goidle
9 Â 31% -81.6% 1 Â 24% sched_debug.cpu#108.cpu_load[2]
237165 Â 3% -39.2% 144275 Â 33% sched_debug.cpu#108.ttwu_count
10533 Â 18% +213.2% 32988 Â 44% sched_debug.cpu#108.nr_uninterruptible
459326 Â 5% -49.1% 233641 Â 27% sched_debug.cpu#108.sched_count
160159 Â 1% -53.4% 74576 Â 17% sched_debug.cpu#108.nr_load_updates
452591 Â 5% -49.4% 229189 Â 26% sched_debug.cpu#108.nr_switches
159796 Â 0% -53.5% 74381 Â 17% sched_debug.cpu#109.nr_load_updates
235027 Â 3% -40.9% 138966 Â 37% sched_debug.cpu#109.ttwu_count
9 Â 13% -83.3% 1 Â 33% sched_debug.cpu#109.cpu_load[2]
11 Â 17% -82.2% 2 Â 35% sched_debug.cpu#109.cpu_load[1]
377549 Â 9% +127.3% 858083 Â 5% sched_debug.cpu#109.avg_idle
219624 Â 4% -66.8% 72964 Â 22% sched_debug.cpu#109.sched_goidle
10595 Â 17% +212.3% 33088 Â 43% sched_debug.cpu#109.nr_uninterruptible
458219 Â 5% -49.0% 233915 Â 27% sched_debug.cpu#109.sched_count
7 Â 10% -83.9% 1 Â 34% sched_debug.cpu#109.cpu_load[3]
451458 Â 5% -49.3% 228945 Â 27% sched_debug.cpu#109.nr_switches
1074502 Â 3% +25.3% 1346508 Â 4% sched_debug.cpu#11.sched_count
1053435 Â 3% +24.5% 1311348 Â 4% sched_debug.cpu#11.nr_switches
516178 Â 3% +17.5% 606755 Â 4% sched_debug.cpu#11.sched_goidle
399968 Â 5% +39.7% 558833 Â 9% sched_debug.cpu#11.ttwu_count
314120 Â 5% +9.2% 342906 Â 4% sched_debug.cpu#11.avg_idle
4418 Â 12% +126.2% 9995 Â 29% sched_debug.cpu#11.ttwu_local
215980 Â 4% -66.2% 72935 Â 21% sched_debug.cpu#110.sched_goidle
450583 Â 5% -48.4% 232685 Â 26% sched_debug.cpu#110.sched_count
10218 Â 17% +217.1% 32403 Â 45% sched_debug.cpu#110.nr_uninterruptible
236411 Â 4% -40.1% 141614 Â 35% sched_debug.cpu#110.ttwu_count
1902 Â 11% +141.6% 4596 Â 36% sched_debug.cpu#110.ttwu_local
361093 Â 2% +142.4% 875352 Â 6% sched_debug.cpu#110.avg_idle
443846 Â 5% -48.6% 228167 Â 26% sched_debug.cpu#110.nr_switches
158751 Â 1% -52.9% 74729 Â 16% sched_debug.cpu#110.nr_load_updates
10236 Â 17% +224.9% 33257 Â 42% sched_debug.cpu#111.nr_uninterruptible
441970 Â 5% -48.2% 228980 Â 26% sched_debug.cpu#111.nr_switches
448690 Â 5% -48.0% 233442 Â 26% sched_debug.cpu#111.sched_count
7 Â 38% -87.1% 1 Â 0% sched_debug.cpu#111.cpu_load[3]
234647 Â 3% -39.7% 141577 Â 35% sched_debug.cpu#111.ttwu_count
158578 Â 0% -52.9% 74649 Â 17% sched_debug.cpu#111.nr_load_updates
362093 Â 8% +139.5% 867085 Â 7% sched_debug.cpu#111.avg_idle
214924 Â 5% -66.1% 72865 Â 22% sched_debug.cpu#111.sched_goidle
158157 Â 0% -52.9% 74489 Â 17% sched_debug.cpu#112.nr_load_updates
440397 Â 5% -48.0% 228865 Â 25% sched_debug.cpu#112.nr_switches
365887 Â 7% +133.1% 852963 Â 5% sched_debug.cpu#112.avg_idle
447069 Â 5% -47.7% 233692 Â 26% sched_debug.cpu#112.sched_count
1900 Â 10% +141.1% 4582 Â 37% sched_debug.cpu#112.ttwu_local
12 Â 31% -85.7% 1 Â 47% sched_debug.cpu#112.cpu_load[2]
214176 Â 5% -66.1% 72600 Â 21% sched_debug.cpu#112.sched_goidle
10159 Â 18% +228.7% 33392 Â 41% sched_debug.cpu#112.nr_uninterruptible
232979 Â 3% -39.8% 140142 Â 35% sched_debug.cpu#112.ttwu_count
232848 Â 3% -39.6% 140551 Â 36% sched_debug.cpu#113.ttwu_count
5 Â 40% -80.0% 1 Â 0% sched_debug.cpu#113.cpu_load[3]
447370 Â 5% -47.4% 235538 Â 26% sched_debug.cpu#113.sched_count
10181 Â 19% +226.5% 33244 Â 43% sched_debug.cpu#113.nr_uninterruptible
1878 Â 11% +155.2% 4793 Â 35% sched_debug.cpu#113.ttwu_local
214289 Â 5% -65.7% 73398 Â 21% sched_debug.cpu#113.sched_goidle
376183 Â 8% +130.9% 868488 Â 4% sched_debug.cpu#113.avg_idle
158046 Â 0% -52.6% 74970 Â 16% sched_debug.cpu#113.nr_load_updates
440528 Â 5% -47.8% 230099 Â 26% sched_debug.cpu#113.nr_switches
157984 Â 0% -52.8% 74585 Â 17% sched_debug.cpu#114.nr_load_updates
389341 Â 11% +126.6% 882130 Â 4% sched_debug.cpu#114.avg_idle
446361 Â 5% -47.6% 234059 Â 27% sched_debug.cpu#114.sched_count
10131 Â 18% +230.7% 33508 Â 44% sched_debug.cpu#114.nr_uninterruptible
1868 Â 11% +145.1% 4578 Â 37% sched_debug.cpu#114.ttwu_local
231072 Â 3% -39.7% 139256 Â 36% sched_debug.cpu#114.ttwu_count
439650 Â 5% -47.7% 229733 Â 26% sched_debug.cpu#114.nr_switches
213858 Â 4% -65.8% 73221 Â 21% sched_debug.cpu#114.sched_goidle
10257 Â 19% +223.6% 33189 Â 45% sched_debug.cpu#115.nr_uninterruptible
215333 Â 5% -66.1% 73048 Â 21% sched_debug.cpu#115.sched_goidle
158530 Â 0% -52.7% 74953 Â 16% sched_debug.cpu#115.nr_load_updates
373651 Â 3% +139.0% 892874 Â 6% sched_debug.cpu#115.avg_idle
1870 Â 11% +148.4% 4645 Â 37% sched_debug.cpu#115.ttwu_local
449557 Â 5% -47.8% 234775 Â 26% sched_debug.cpu#115.sched_count
235907 Â 3% -39.4% 142887 Â 36% sched_debug.cpu#115.ttwu_count
11 Â 48% -89.1% 1 Â 34% sched_debug.cpu#115.cpu_load[3]
442695 Â 5% -48.0% 230068 Â 26% sched_debug.cpu#115.nr_switches
377458 Â 5% +130.2% 869008 Â 6% sched_debug.cpu#116.avg_idle
158733 Â 0% -52.8% 74844 Â 16% sched_debug.cpu#116.nr_load_updates
217431 Â 5% -66.5% 72782 Â 21% sched_debug.cpu#116.sched_goidle
236350 Â 3% -39.2% 143794 Â 34% sched_debug.cpu#116.ttwu_count
11 Â 46% -88.6% 1 Â 34% sched_debug.cpu#116.cpu_load[3]
446682 Â 5% -48.6% 229763 Â 26% sched_debug.cpu#116.nr_switches
453332 Â 5% -48.2% 234604 Â 26% sched_debug.cpu#116.sched_count
23 Â 49% -88.2% 2 Â 30% sched_debug.cpu#116.cpu_load[1]
33 Â 30% -91.0% 3 Â 33% sched_debug.cpu#116.cpu_load[0]
15318 Â 5% -37.5% 9566 Â 37% sched_debug.cpu#116.curr->pid
10313 Â 18% +223.4% 33352 Â 43% sched_debug.cpu#116.nr_uninterruptible
219393 Â 5% -66.9% 72645 Â 21% sched_debug.cpu#117.sched_goidle
10507 Â 19% +217.5% 33362 Â 44% sched_debug.cpu#117.nr_uninterruptible
354700 Â 8% +147.6% 878317 Â 3% sched_debug.cpu#117.avg_idle
9 Â 46% -82.1% 1 Â 47% sched_debug.cpu#117.cpu_load[2]
159100 Â 0% -53.1% 74634 Â 17% sched_debug.cpu#117.nr_load_updates
240074 Â 3% -40.1% 143847 Â 35% sched_debug.cpu#117.ttwu_count
1926 Â 10% +144.1% 4702 Â 35% sched_debug.cpu#117.ttwu_local
0 Â 0% +Inf% 1 Â 0% sched_debug.cpu#117.nr_running
450850 Â 5% -49.1% 229588 Â 26% sched_debug.cpu#117.nr_switches
458037 Â 5% -48.9% 234019 Â 26% sched_debug.cpu#117.sched_count
363475 Â 2% +135.4% 855586 Â 6% sched_debug.cpu#118.avg_idle
241373 Â 3% -40.3% 144168 Â 35% sched_debug.cpu#118.ttwu_count
465668 Â 5% -49.7% 234055 Â 26% sched_debug.cpu#118.sched_count
458922 Â 5% -50.1% 229212 Â 25% sched_debug.cpu#118.nr_switches
10907 Â 19% +203.9% 33151 Â 43% sched_debug.cpu#118.nr_uninterruptible
160228 Â 0% -53.5% 74553 Â 16% sched_debug.cpu#118.nr_load_updates
223357 Â 5% -67.5% 72534 Â 20% sched_debug.cpu#118.sched_goidle
11282 Â 20% +191.8% 32926 Â 43% sched_debug.cpu#119.nr_uninterruptible
365829 Â 5% +139.7% 877017 Â 5% sched_debug.cpu#119.avg_idle
228345 Â 5% -68.3% 72484 Â 21% sched_debug.cpu#119.sched_goidle
1952 Â 8% +142.6% 4736 Â 36% sched_debug.cpu#119.ttwu_local
161450 Â 0% -54.0% 74215 Â 16% sched_debug.cpu#119.nr_load_updates
476317 Â 5% -50.9% 233762 Â 26% sched_debug.cpu#119.sched_count
468835 Â 5% -51.1% 229406 Â 26% sched_debug.cpu#119.nr_switches
245834 Â 2% -42.4% 141647 Â 36% sched_debug.cpu#119.ttwu_count
1069632 Â 3% +25.9% 1346347 Â 4% sched_debug.cpu#12.sched_count
4492 Â 10% +126.0% 10153 Â 25% sched_debug.cpu#12.ttwu_local
0 Â 0% +Inf% 2 Â 34% sched_debug.cpu#12.load
512665 Â 3% +18.4% 606896 Â 4% sched_debug.cpu#12.sched_goidle
1046593 Â 3% +25.4% 1312232 Â 4% sched_debug.cpu#12.nr_switches
398917 Â 5% +40.0% 558290 Â 9% sched_debug.cpu#12.ttwu_count
397758 Â 5% +41.8% 563965 Â 10% sched_debug.cpu#13.ttwu_count
4605 Â 10% +118.3% 10054 Â 29% sched_debug.cpu#13.ttwu_local
1066964 Â 3% +26.5% 1349215 Â 4% sched_debug.cpu#13.sched_count
512519 Â 3% +18.8% 608637 Â 4% sched_debug.cpu#13.sched_goidle
1046422 Â 3% +25.7% 1315144 Â 4% sched_debug.cpu#13.nr_switches
1063482 Â 3% +27.2% 1352562 Â 4% sched_debug.cpu#14.sched_count
1042319 Â 4% +26.4% 1317288 Â 4% sched_debug.cpu#14.nr_switches
2 Â 30% -54.5% 1 Â 34% sched_debug.cpu#14.cpu_load[4]
395972 Â 5% +43.1% 566604 Â 10% sched_debug.cpu#14.ttwu_count
4417 Â 12% +130.6% 10185 Â 33% sched_debug.cpu#14.ttwu_local
510493 Â 4% +19.4% 609573 Â 4% sched_debug.cpu#14.sched_goidle
316104 Â 11% +102.9% 641422 Â 30% sched_debug.cpu#15.avg_idle
5655 Â 26% +47.6% 8349 Â 4% sched_debug.cpu#16.ttwu_local
304298 Â 6% +109.3% 636995 Â 30% sched_debug.cpu#16.avg_idle
5317 Â 12% +46.5% 7791 Â 6% sched_debug.cpu#17.ttwu_local
303838 Â 6% +108.3% 632990 Â 32% sched_debug.cpu#17.avg_idle
5213 Â 17% +52.5% 7947 Â 8% sched_debug.cpu#18.ttwu_local
297224 Â 7% +114.0% 636143 Â 31% sched_debug.cpu#18.avg_idle
5325 Â 12% +59.1% 8475 Â 5% sched_debug.cpu#19.ttwu_local
322255 Â 10% +95.1% 628732 Â 33% sched_debug.cpu#19.avg_idle
1111501 Â 2% +17.9% 1310937 Â 5% sched_debug.cpu#2.nr_switches
4788 Â 13% +105.3% 9830 Â 25% sched_debug.cpu#2.ttwu_local
1131111 Â 2% +19.0% 1345850 Â 5% sched_debug.cpu#2.sched_count
545588 Â 2% +11.2% 606775 Â 5% sched_debug.cpu#2.sched_goidle
423779 Â 4% +31.3% 556589 Â 10% sched_debug.cpu#2.ttwu_count
291025 Â 6% +114.3% 623791 Â 31% sched_debug.cpu#20.avg_idle
4983 Â 5% +69.8% 8460 Â 7% sched_debug.cpu#20.ttwu_local
4751 Â 6% +84.1% 8748 Â 4% sched_debug.cpu#21.ttwu_local
318379 Â 2% +99.5% 635089 Â 33% sched_debug.cpu#21.avg_idle
4822 Â 7% +67.5% 8078 Â 8% sched_debug.cpu#22.ttwu_local
328942 Â 8% +89.1% 622099 Â 33% sched_debug.cpu#23.avg_idle
4753 Â 8% +70.6% 8108 Â 11% sched_debug.cpu#23.ttwu_local
5070 Â 4% +62.7% 8251 Â 9% sched_debug.cpu#24.ttwu_local
311654 Â 5% +101.4% 627764 Â 34% sched_debug.cpu#24.avg_idle
11 Â 36% -75.0% 2 Â 30% sched_debug.cpu#25.cpu_load[2]
8 Â 29% -65.6% 2 Â 30% sched_debug.cpu#25.cpu_load[3]
4735 Â 8% +66.4% 7878 Â 8% sched_debug.cpu#25.ttwu_local
334745 Â 4% +89.2% 633318 Â 32% sched_debug.cpu#25.avg_idle
312300 Â 9% +108.0% 649565 Â 31% sched_debug.cpu#26.avg_idle
3 Â 24% -64.3% 1 Â 34% sched_debug.cpu#26.cpu_load[4]
5139 Â 11% +51.4% 7782 Â 8% sched_debug.cpu#26.ttwu_local
1 Â 47% +100.0% 3 Â 14% sched_debug.cpu#27.cpu_load[1]
4972 Â 9% +57.5% 7828 Â 12% sched_debug.cpu#27.ttwu_local
339666 Â 13% +87.2% 635981 Â 31% sched_debug.cpu#27.avg_idle
1 Â 34% +140.0% 3 Â 23% sched_debug.cpu#27.cpu_load[0]
5120 Â 12% +59.9% 8189 Â 9% sched_debug.cpu#28.ttwu_local
326310 Â 5% +96.9% 642647 Â 31% sched_debug.cpu#28.avg_idle
5624 Â 18% +50.7% 8479 Â 6% sched_debug.cpu#29.ttwu_local
317361 Â 7% +95.0% 618915 Â 35% sched_debug.cpu#29.avg_idle
1 Â 0% +375.0% 4 Â 9% sched_debug.cpu#3.cpu_load[0]
543732 Â 2% +11.7% 607295 Â 5% sched_debug.cpu#3.sched_goidle
1133509 Â 2% +18.8% 1347015 Â 4% sched_debug.cpu#3.sched_count
419045 Â 5% +32.6% 555748 Â 10% sched_debug.cpu#3.ttwu_count
1108080 Â 2% +18.4% 1312225 Â 4% sched_debug.cpu#3.nr_switches
4586 Â 10% +113.2% 9777 Â 25% sched_debug.cpu#3.ttwu_local
4814 Â 13% +115.8% 10391 Â 20% sched_debug.cpu#30.ttwu_local
4956 Â 12% +116.4% 10725 Â 17% sched_debug.cpu#31.ttwu_local
4822 Â 13% +100.7% 9676 Â 14% sched_debug.cpu#32.ttwu_local
1 Â 0% +300.0% 4 Â 30% sched_debug.cpu#32.cpu_load[0]
3 Â 14% -64.3% 1 Â 34% sched_debug.cpu#33.cpu_load[4]
5034 Â 17% +93.5% 9741 Â 14% sched_debug.cpu#33.ttwu_local
1 Â 33% +133.3% 3 Â 14% sched_debug.cpu#34.cpu_load[2]
4908 Â 15% +96.4% 9637 Â 12% sched_debug.cpu#34.ttwu_local
5062 Â 14% +89.2% 9576 Â 16% sched_debug.cpu#35.ttwu_local
4738 Â 15% +108.7% 9892 Â 15% sched_debug.cpu#36.ttwu_local
315129 Â 9% +55.5% 490026 Â 17% sched_debug.cpu#37.avg_idle
4831 Â 15% +107.7% 10032 Â 12% sched_debug.cpu#37.ttwu_local
331506 Â 7% +40.9% 467107 Â 16% sched_debug.cpu#38.avg_idle
4738 Â 13% +125.1% 10668 Â 9% sched_debug.cpu#38.ttwu_local
4 Â 25% -58.8% 1 Â 24% sched_debug.cpu#39.cpu_load[4]
4788 Â 14% +108.3% 9975 Â 22% sched_debug.cpu#39.ttwu_local
541207 Â 2% +13.1% 611853 Â 5% sched_debug.cpu#4.sched_goidle
1103021 Â 2% +19.8% 1321365 Â 5% sched_debug.cpu#4.nr_switches
4589 Â 10% +111.5% 9707 Â 22% sched_debug.cpu#4.ttwu_local
1122810 Â 2% +20.8% 1355960 Â 5% sched_debug.cpu#4.sched_count
414454 Â 5% +32.3% 548289 Â 10% sched_debug.cpu#4.ttwu_count
4792 Â 16% +106.2% 9883 Â 19% sched_debug.cpu#40.ttwu_local
4816 Â 15% +110.7% 10147 Â 12% sched_debug.cpu#41.ttwu_local
4735 Â 15% +119.3% 10384 Â 23% sched_debug.cpu#42.ttwu_local
4982 Â 37% +74.9% 8713 Â 14% sched_debug.cpu#42.curr->pid
1 Â 34% +140.0% 3 Â 33% sched_debug.cpu#43.cpu_load[0]
4756 Â 14% +101.0% 9560 Â 17% sched_debug.cpu#43.ttwu_local
1 Â 34% +160.0% 3 Â 13% sched_debug.cpu#43.cpu_load[1]
322258 Â 11% +43.5% 462420 Â 16% sched_debug.cpu#43.avg_idle
325887 Â 4% +43.4% 467240 Â 18% sched_debug.cpu#44.avg_idle
4814 Â 14% +112.4% 10226 Â 11% sched_debug.cpu#44.ttwu_local
5017 Â 8% +72.0% 8628 Â 13% sched_debug.cpu#45.ttwu_local
323613 Â 9% +61.2% 521795 Â 24% sched_debug.cpu#45.avg_idle
212655 Â 0% -29.2% 150525 Â 21% sched_debug.cpu#45.nr_load_updates
4919 Â 6% +91.3% 9413 Â 12% sched_debug.cpu#46.ttwu_local
316524 Â 4% +61.2% 510277 Â 29% sched_debug.cpu#46.avg_idle
213391 Â 0% -29.7% 150024 Â 22% sched_debug.cpu#46.nr_load_updates
1 Â 34% +260.0% 4 Â 45% sched_debug.cpu#47.cpu_load[2]
213365 Â 0% -29.3% 150874 Â 21% sched_debug.cpu#47.nr_load_updates
4959 Â 5% +110.2% 10424 Â 12% sched_debug.cpu#47.ttwu_local
297977 Â 5% +70.9% 509246 Â 27% sched_debug.cpu#47.avg_idle
5010 Â 5% +87.0% 9370 Â 10% sched_debug.cpu#48.ttwu_local
295224 Â 14% +75.4% 517762 Â 20% sched_debug.cpu#48.avg_idle
212733 Â 0% -29.1% 150866 Â 21% sched_debug.cpu#48.nr_load_updates
1 Â 0% +400.0% 5 Â 34% sched_debug.cpu#48.cpu_load[0]
212347 Â 0% -29.2% 150357 Â 21% sched_debug.cpu#49.nr_load_updates
307183 Â 7% +65.7% 509139 Â 22% sched_debug.cpu#49.avg_idle
5076 Â 8% +70.0% 8630 Â 13% sched_debug.cpu#49.ttwu_local
5672 Â 47% -53.8% 2619 Â 33% sched_debug.cpu#49.curr->pid
1 Â 0% +325.0% 4 Â 10% sched_debug.cpu#5.cpu_load[1]
4557 Â 11% +112.8% 9695 Â 26% sched_debug.cpu#5.ttwu_local
5632 Â 45% +100.1% 11273 Â 22% sched_debug.cpu#5.curr->pid
1 Â 24% +128.6% 4 Â 0% sched_debug.cpu#5.cpu_load[2]
529333 Â 3% +14.2% 604409 Â 5% sched_debug.cpu#5.sched_goidle
1079377 Â 3% +21.0% 1306000 Â 5% sched_debug.cpu#5.nr_switches
413773 Â 4% +33.3% 551394 Â 9% sched_debug.cpu#5.ttwu_count
1099369 Â 3% +22.0% 1340945 Â 5% sched_debug.cpu#5.sched_count
338950 Â 12% +56.4% 530174 Â 22% sched_debug.cpu#50.avg_idle
4950 Â 5% +71.8% 8507 Â 12% sched_debug.cpu#50.ttwu_local
211756 Â 0% -29.1% 150035 Â 21% sched_debug.cpu#50.nr_load_updates
211395 Â 0% -28.7% 150623 Â 21% sched_debug.cpu#51.nr_load_updates
333949 Â 8% +54.1% 514474 Â 25% sched_debug.cpu#51.avg_idle
4856 Â 7% +85.0% 8985 Â 12% sched_debug.cpu#51.ttwu_local
211096 Â 0% -28.4% 151073 Â 21% sched_debug.cpu#52.nr_load_updates
4834 Â 7% +110.5% 10175 Â 9% sched_debug.cpu#52.ttwu_local
325681 Â 1% +60.7% 523453 Â 27% sched_debug.cpu#52.avg_idle
1 Â 0% +425.0% 5 Â 43% sched_debug.cpu#53.cpu_load[1]
210826 Â 0% -28.6% 150475 Â 21% sched_debug.cpu#53.nr_load_updates
322501 Â 11% +60.3% 516953 Â 22% sched_debug.cpu#53.avg_idle
4764 Â 8% +89.9% 9047 Â 14% sched_debug.cpu#53.ttwu_local
210701 Â 0% -28.8% 150099 Â 21% sched_debug.cpu#54.nr_load_updates
338710 Â 11% +55.1% 525412 Â 22% sched_debug.cpu#54.avg_idle
1 Â 0% +300.0% 4 Â 50% sched_debug.cpu#54.cpu_load[0]
4884 Â 5% +89.3% 9248 Â 12% sched_debug.cpu#54.ttwu_local
210894 Â 0% -28.7% 150271 Â 21% sched_debug.cpu#55.nr_load_updates
5071 Â 5% +84.2% 9341 Â 6% sched_debug.cpu#55.ttwu_local
325786 Â 2% +60.5% 522749 Â 20% sched_debug.cpu#55.avg_idle
4926 Â 6% +87.5% 9236 Â 17% sched_debug.cpu#56.ttwu_local
210935 Â 0% -28.7% 150334 Â 21% sched_debug.cpu#56.nr_load_updates
322812 Â 7% +64.7% 531744 Â 18% sched_debug.cpu#56.avg_idle
3 Â 39% -61.5% 1 Â 34% sched_debug.cpu#56.cpu_load[4]
4935 Â 7% +89.4% 9345 Â 12% sched_debug.cpu#57.ttwu_local
308906 Â 6% +68.8% 521531 Â 18% sched_debug.cpu#57.avg_idle
210966 Â 0% -28.8% 150297 Â 21% sched_debug.cpu#57.nr_load_updates
4881 Â 8% +89.1% 9229 Â 9% sched_debug.cpu#58.ttwu_local
211377 Â 0% -29.1% 149801 Â 21% sched_debug.cpu#58.nr_load_updates
313176 Â 7% +62.7% 509465 Â 21% sched_debug.cpu#58.avg_idle
332981 Â 7% +52.0% 506078 Â 23% sched_debug.cpu#59.avg_idle
4903 Â 6% +88.8% 9259 Â 16% sched_debug.cpu#59.ttwu_local
211798 Â 0% -29.2% 150043 Â 21% sched_debug.cpu#59.nr_load_updates
408039 Â 4% +35.0% 550837 Â 9% sched_debug.cpu#6.ttwu_count
522969 Â 3% +16.1% 607170 Â 5% sched_debug.cpu#6.sched_goidle
4818 Â 17% +103.6% 9809 Â 25% sched_debug.cpu#6.ttwu_local
1067408 Â 3% +22.9% 1311321 Â 4% sched_debug.cpu#6.nr_switches
1098484 Â 3% +22.6% 1346438 Â 4% sched_debug.cpu#6.sched_count
159254 Â 1% -37.4% 99737 Â 5% sched_debug.cpu#60.nr_load_updates
465823 Â 5% -29.8% 327041 Â 5% sched_debug.cpu#60.nr_switches
472826 Â 5% -29.4% 333802 Â 5% sched_debug.cpu#60.sched_count
4 Â 40% -72.2% 1 Â 34% sched_debug.cpu#60.cpu_load[4]
2046 Â 12% +254.3% 7252 Â 28% sched_debug.cpu#60.ttwu_local
226846 Â 5% -57.3% 96883 Â 6% sched_debug.cpu#60.sched_goidle
375332 Â 4% +112.6% 797875 Â 2% sched_debug.cpu#60.avg_idle
11749 Â 14% +348.3% 52676 Â 2% sched_debug.cpu#60.nr_uninterruptible
2192 Â 13% +228.1% 7192 Â 29% sched_debug.cpu#61.ttwu_local
13261 Â 13% +323.8% 56208 Â 3% sched_debug.cpu#61.nr_uninterruptible
258980 Â 3% -11.4% 229352 Â 7% sched_debug.cpu#61.ttwu_count
8 Â 24% -68.6% 2 Â 30% sched_debug.cpu#61.cpu_load[2]
170014 Â 1% -40.7% 100819 Â 5% sched_debug.cpu#61.nr_load_updates
526023 Â 4% -35.3% 340593 Â 5% sched_debug.cpu#61.sched_count
253250 Â 4% -61.4% 97676 Â 6% sched_debug.cpu#61.sched_goidle
8 Â 26% -78.1% 1 Â 24% sched_debug.cpu#61.cpu_load[3]
6 Â 28% -81.5% 1 Â 34% sched_debug.cpu#61.cpu_load[4]
351138 Â 4% +136.7% 831304 Â 4% sched_debug.cpu#61.avg_idle
518488 Â 4% -35.6% 333844 Â 5% sched_debug.cpu#61.nr_switches
166210 Â 0% -39.4% 100751 Â 5% sched_debug.cpu#62.nr_load_updates
4 Â 40% -78.9% 1 Â 0% sched_debug.cpu#62.cpu_load[4]
7 Â 38% -79.3% 1 Â 33% sched_debug.cpu#62.cpu_load[3]
487117 Â 4% -31.6% 332975 Â 5% sched_debug.cpu#62.nr_switches
10 Â 47% -76.2% 2 Â 20% sched_debug.cpu#62.cpu_load[2]
2007 Â 12% +257.3% 7174 Â 28% sched_debug.cpu#62.ttwu_local
351310 Â 5% +137.9% 835592 Â 2% sched_debug.cpu#62.avg_idle
494319 Â 4% -31.3% 339633 Â 5% sched_debug.cpu#62.sched_count
11740 Â 14% +376.8% 55979 Â 5% sched_debug.cpu#62.nr_uninterruptible
237774 Â 4% -59.0% 97456 Â 6% sched_debug.cpu#62.sched_goidle
163519 Â 0% -38.4% 100702 Â 5% sched_debug.cpu#63.nr_load_updates
19 Â 39% -77.6% 4 Â 30% sched_debug.cpu#63.cpu_load[0]
8 Â 23% -72.7% 2 Â 19% sched_debug.cpu#63.cpu_load[2]
5 Â 25% -82.6% 1 Â 0% sched_debug.cpu#63.cpu_load[4]
7 Â 24% -78.6% 1 Â 33% sched_debug.cpu#63.cpu_load[3]
11 Â 28% -69.6% 3 Â 14% sched_debug.cpu#63.cpu_load[1]
383806 Â 6% +107.3% 795485 Â 2% sched_debug.cpu#63.avg_idle
230985 Â 3% -57.7% 97730 Â 6% sched_debug.cpu#63.sched_goidle
473483 Â 4% -29.6% 333448 Â 5% sched_debug.cpu#63.nr_switches
11189 Â 12% +402.7% 56248 Â 4% sched_debug.cpu#63.nr_uninterruptible
1967 Â 9% +268.6% 7251 Â 30% sched_debug.cpu#63.ttwu_local
480528 Â 4% -29.0% 341011 Â 5% sched_debug.cpu#63.sched_count
360584 Â 1% +124.2% 808561 Â 5% sched_debug.cpu#64.avg_idle
6 Â 49% -84.6% 1 Â 0% sched_debug.cpu#64.cpu_load[4]
162032 Â 1% -37.9% 100543 Â 5% sched_debug.cpu#64.nr_load_updates
471634 Â 4% -27.6% 341321 Â 5% sched_debug.cpu#64.sched_count
464736 Â 4% -28.0% 334706 Â 5% sched_debug.cpu#64.nr_switches
1924 Â 10% +271.0% 7140 Â 28% sched_debug.cpu#64.ttwu_local
226602 Â 4% -56.8% 97801 Â 7% sched_debug.cpu#64.sched_goidle
10882 Â 13% +426.6% 57312 Â 2% sched_debug.cpu#64.nr_uninterruptible
454639 Â 4% -26.7% 333349 Â 5% sched_debug.cpu#65.nr_switches
461901 Â 4% -26.3% 340369 Â 6% sched_debug.cpu#65.sched_count
0 Â 0% +Inf% 1 Â 24% sched_debug.cpu#65.nr_running
10689 Â 12% +426.0% 56232 Â 3% sched_debug.cpu#65.nr_uninterruptible
369695 Â 3% +115.9% 798046 Â 4% sched_debug.cpu#65.avg_idle
5 Â 28% -81.0% 1 Â 0% sched_debug.cpu#65.cpu_load[4]
221512 Â 4% -55.9% 97706 Â 7% sched_debug.cpu#65.sched_goidle
160420 Â 1% -37.3% 100577 Â 5% sched_debug.cpu#65.nr_load_updates
5 Â 28% -65.2% 2 Â 0% sched_debug.cpu#65.cpu_load[3]
1926 Â 11% +275.3% 7228 Â 28% sched_debug.cpu#65.ttwu_local
5 Â 20% -72.7% 1 Â 33% sched_debug.cpu#66.cpu_load[3]
7 Â 15% -65.5% 2 Â 20% sched_debug.cpu#66.cpu_load[2]
1859 Â 9% +282.2% 7106 Â 29% sched_debug.cpu#66.ttwu_local
159053 Â 1% -36.6% 100774 Â 5% sched_debug.cpu#66.nr_load_updates
8 Â 29% -61.8% 3 Â 13% sched_debug.cpu#66.cpu_load[1]
457600 Â 4% -25.3% 341731 Â 5% sched_debug.cpu#66.sched_count
10564 Â 14% +438.2% 56855 Â 4% sched_debug.cpu#66.nr_uninterruptible
369343 Â 8% +118.8% 808095 Â 3% sched_debug.cpu#66.avg_idle
4 Â 30% -75.0% 1 Â 0% sched_debug.cpu#66.cpu_load[4]
219472 Â 3% -55.3% 98188 Â 6% sched_debug.cpu#66.sched_goidle
450502 Â 4% -25.7% 334770 Â 5% sched_debug.cpu#66.nr_switches
466476 Â 6% -27.1% 339837 Â 5% sched_debug.cpu#67.sched_count
160888 Â 2% -37.7% 100182 Â 5% sched_debug.cpu#67.nr_load_updates
5 Â 37% -69.6% 1 Â 24% sched_debug.cpu#67.cpu_load[3]
459640 Â 6% -27.6% 332819 Â 5% sched_debug.cpu#67.nr_switches
5 Â 24% -76.2% 1 Â 34% sched_debug.cpu#67.cpu_load[4]
10908 Â 17% +417.4% 56441 Â 3% sched_debug.cpu#67.nr_uninterruptible
1935 Â 9% +273.9% 7235 Â 30% sched_debug.cpu#67.ttwu_local
224021 Â 6% -56.4% 97632 Â 6% sched_debug.cpu#67.sched_goidle
349251 Â 6% +128.9% 799422 Â 1% sched_debug.cpu#67.avg_idle
0 Â 0% +Inf% 1 Â 34% sched_debug.cpu#67.nr_running
1907 Â 11% +280.5% 7256 Â 29% sched_debug.cpu#68.ttwu_local
219479 Â 4% -55.6% 97442 Â 6% sched_debug.cpu#68.sched_goidle
450587 Â 5% -26.0% 333323 Â 5% sched_debug.cpu#68.nr_switches
392113 Â 8% +108.3% 816905 Â 2% sched_debug.cpu#68.avg_idle
0 Â 0% +Inf% 1 Â 33% sched_debug.cpu#68.nr_running
5 Â 34% -81.0% 1 Â 0% sched_debug.cpu#68.cpu_load[4]
159530 Â 1% -37.2% 100191 Â 5% sched_debug.cpu#68.nr_load_updates
457458 Â 5% -25.7% 340032 Â 5% sched_debug.cpu#68.sched_count
10515 Â 14% +439.1% 56681 Â 3% sched_debug.cpu#68.nr_uninterruptible
10444 Â 14% +450.7% 57517 Â 1% sched_debug.cpu#69.nr_uninterruptible
446109 Â 4% -25.1% 334321 Â 5% sched_debug.cpu#69.nr_switches
452933 Â 4% -24.7% 340979 Â 5% sched_debug.cpu#69.sched_count
217204 Â 4% -55.1% 97621 Â 6% sched_debug.cpu#69.sched_goidle
158575 Â 1% -36.8% 100204 Â 5% sched_debug.cpu#69.nr_load_updates
351220 Â 4% +137.5% 834165 Â 3% sched_debug.cpu#69.avg_idle
1876 Â 11% +283.9% 7204 Â 28% sched_debug.cpu#69.ttwu_local
1091164 Â 3% +24.1% 1354466 Â 5% sched_debug.cpu#7.sched_count
523643 Â 3% +16.7% 611192 Â 5% sched_debug.cpu#7.sched_goidle
410239 Â 4% +34.6% 552357 Â 9% sched_debug.cpu#7.ttwu_count
4561 Â 10% +124.7% 10249 Â 21% sched_debug.cpu#7.ttwu_local
1068057 Â 3% +23.5% 1319002 Â 5% sched_debug.cpu#7.nr_switches
10279 Â 14% +448.2% 56356 Â 2% sched_debug.cpu#70.nr_uninterruptible
352820 Â 9% +134.6% 827636 Â 4% sched_debug.cpu#70.avg_idle
9783 Â 25% +48.0% 14480 Â 8% sched_debug.cpu#70.curr->pid
449808 Â 4% -24.4% 340071 Â 5% sched_debug.cpu#70.sched_count
215565 Â 4% -54.9% 97278 Â 6% sched_debug.cpu#70.sched_goidle
442796 Â 5% -24.7% 333367 Â 5% sched_debug.cpu#70.nr_switches
157886 Â 1% -36.6% 100081 Â 5% sched_debug.cpu#70.nr_load_updates
3 Â 31% -71.4% 1 Â 0% sched_debug.cpu#70.cpu_load[4]
0 Â 0% +Inf% 1 Â 0% sched_debug.cpu#70.nr_running
1847 Â 12% +290.8% 7217 Â 29% sched_debug.cpu#70.ttwu_local
10300 Â 15% +445.5% 56190 Â 2% sched_debug.cpu#71.nr_uninterruptible
440375 Â 5% -24.4% 332977 Â 4% sched_debug.cpu#71.nr_switches
1857 Â 12% +287.9% 7205 Â 30% sched_debug.cpu#71.ttwu_local
447123 Â 5% -24.0% 339605 Â 4% sched_debug.cpu#71.sched_count
372180 Â 6% +119.1% 815314 Â 2% sched_debug.cpu#71.avg_idle
214355 Â 4% -54.7% 97027 Â 6% sched_debug.cpu#71.sched_goidle
157564 Â 1% -36.4% 100194 Â 4% sched_debug.cpu#71.nr_load_updates
1848 Â 12% +290.7% 7220 Â 29% sched_debug.cpu#72.ttwu_local
9980 Â 32% +51.9% 15161 Â 35% sched_debug.cpu#72.curr->pid
7 Â 10% -78.6% 1 Â 33% sched_debug.cpu#72.cpu_load[4]
389711 Â 4% +112.4% 827764 Â 1% sched_debug.cpu#72.avg_idle
8 Â 24% -79.4% 1 Â 47% sched_debug.cpu#72.cpu_load[3]
439193 Â 5% -23.8% 334863 Â 5% sched_debug.cpu#72.nr_switches
445902 Â 5% -23.4% 341459 Â 5% sched_debug.cpu#72.sched_count
213663 Â 5% -54.6% 96938 Â 6% sched_debug.cpu#72.sched_goidle
157330 Â 1% -36.1% 100472 Â 5% sched_debug.cpu#72.nr_load_updates
10371 Â 15% +454.6% 57521 Â 2% sched_debug.cpu#72.nr_uninterruptible
7 Â 39% -65.5% 2 Â 20% sched_debug.cpu#73.cpu_load[2]
214049 Â 5% -54.6% 97108 Â 6% sched_debug.cpu#73.sched_goidle
10495 Â 16% +434.3% 56076 Â 3% sched_debug.cpu#73.nr_uninterruptible
1824 Â 14% +299.2% 7281 Â 30% sched_debug.cpu#73.ttwu_local
6 Â 45% -70.8% 1 Â 24% sched_debug.cpu#73.cpu_load[3]
440090 Â 6% -24.2% 333514 Â 4% sched_debug.cpu#73.nr_switches
398199 Â 4% +100.8% 799630 Â 2% sched_debug.cpu#73.avg_idle
446777 Â 6% -23.9% 339998 Â 4% sched_debug.cpu#73.sched_count
157615 Â 1% -36.4% 100184 Â 5% sched_debug.cpu#73.nr_load_updates
10541 Â 16% +435.4% 56435 Â 3% sched_debug.cpu#74.nr_uninterruptible
448101 Â 5% -24.4% 338746 Â 5% sched_debug.cpu#74.sched_count
394634 Â 4% +101.7% 796067 Â 1% sched_debug.cpu#74.avg_idle
17 Â 43% -77.9% 3 Â 22% sched_debug.cpu#74.cpu_load[1]
441455 Â 5% -24.7% 332301 Â 5% sched_debug.cpu#74.nr_switches
1844 Â 11% +291.8% 7227 Â 30% sched_debug.cpu#74.ttwu_local
157582 Â 1% -36.7% 99748 Â 5% sched_debug.cpu#74.nr_load_updates
9 Â 28% -81.6% 1 Â 24% sched_debug.cpu#74.cpu_load[3]
214781 Â 5% -55.1% 96404 Â 6% sched_debug.cpu#74.sched_goidle
7 Â 26% -82.1% 1 Â 34% sched_debug.cpu#74.cpu_load[4]
12 Â 35% -80.0% 2 Â 20% sched_debug.cpu#74.cpu_load[2]
164640 Â 1% -52.7% 77949 Â 26% sched_debug.cpu#75.nr_load_updates
361203 Â 5% +153.4% 915258 Â 5% sched_debug.cpu#75.avg_idle
483086 Â 1% -51.1% 236050 Â 37% sched_debug.cpu#75.sched_count
476102 Â 1% -51.3% 231994 Â 36% sched_debug.cpu#75.nr_switches
232077 Â 1% -68.4% 73385 Â 27% sched_debug.cpu#75.sched_goidle
475860 Â 1% -50.4% 235843 Â 37% sched_debug.cpu#76.sched_count
370986 Â 4% +140.8% 893396 Â 7% sched_debug.cpu#76.avg_idle
468899 Â 1% -50.6% 231822 Â 37% sched_debug.cpu#76.nr_switches
163074 Â 0% -52.4% 77553 Â 26% sched_debug.cpu#76.nr_load_updates
228349 Â 1% -68.1% 72873 Â 28% sched_debug.cpu#76.sched_goidle
0 Â 0% +Inf% 25 Â 38% sched_debug.cpu#76.load
363660 Â 1% +148.2% 902694 Â 4% sched_debug.cpu#77.avg_idle
476100 Â 6% -50.2% 236976 Â 37% sched_debug.cpu#77.sched_count
228629 Â 5% -68.1% 72878 Â 28% sched_debug.cpu#77.sched_goidle
163638 Â 3% -52.6% 77542 Â 27% sched_debug.cpu#77.nr_load_updates
469211 Â 6% -50.5% 232239 Â 37% sched_debug.cpu#77.nr_switches
460611 Â 4% -49.7% 231831 Â 37% sched_debug.cpu#78.nr_switches
467338 Â 4% -49.5% 235855 Â 37% sched_debug.cpu#78.sched_count
224421 Â 4% -67.6% 72672 Â 28% sched_debug.cpu#78.sched_goidle
381951 Â 13% +136.3% 902531 Â 6% sched_debug.cpu#78.avg_idle
162704 Â 2% -52.4% 77434 Â 27% sched_debug.cpu#78.nr_load_updates
221400 Â 3% -66.8% 73518 Â 28% sched_debug.cpu#79.sched_goidle
397648 Â 7% +127.9% 906260 Â 5% sched_debug.cpu#79.avg_idle
461046 Â 3% -48.4% 237777 Â 37% sched_debug.cpu#79.sched_count
454447 Â 3% -48.6% 233725 Â 37% sched_debug.cpu#79.nr_switches
161593 Â 2% -51.8% 77813 Â 26% sched_debug.cpu#79.nr_load_updates
4503 Â 11% +117.6% 9799 Â 25% sched_debug.cpu#8.ttwu_local
1064520 Â 3% +23.4% 1313774 Â 4% sched_debug.cpu#8.nr_switches
521824 Â 3% +16.6% 608464 Â 5% sched_debug.cpu#8.sched_goidle
407022 Â 5% +35.5% 551655 Â 9% sched_debug.cpu#8.ttwu_count
1084793 Â 3% +24.3% 1348477 Â 4% sched_debug.cpu#8.sched_count
449003 Â 4% -48.8% 230108 Â 37% sched_debug.cpu#80.nr_switches
218669 Â 4% -67.0% 72091 Â 28% sched_debug.cpu#80.sched_goidle
455752 Â 4% -48.3% 235485 Â 37% sched_debug.cpu#80.sched_count
355619 Â 11% +154.8% 906078 Â 5% sched_debug.cpu#80.avg_idle
160754 Â 2% -52.2% 76880 Â 27% sched_debug.cpu#80.nr_load_updates
216571 Â 4% -66.5% 72533 Â 29% sched_debug.cpu#81.sched_goidle
375573 Â 5% +137.9% 893511 Â 6% sched_debug.cpu#81.avg_idle
160219 Â 2% -51.7% 77329 Â 28% sched_debug.cpu#81.nr_load_updates
451643 Â 4% -47.6% 236789 Â 38% sched_debug.cpu#81.sched_count
444845 Â 4% -47.7% 232555 Â 37% sched_debug.cpu#81.nr_switches
215428 Â 4% -66.6% 71893 Â 29% sched_debug.cpu#82.sched_goidle
368902 Â 6% +152.6% 931727 Â 3% sched_debug.cpu#82.avg_idle
442822 Â 4% -48.0% 230123 Â 37% sched_debug.cpu#82.nr_switches
159684 Â 2% -52.1% 76475 Â 27% sched_debug.cpu#82.nr_load_updates
449549 Â 4% -47.8% 234604 Â 37% sched_debug.cpu#82.sched_count
159520 Â 2% -51.4% 77568 Â 27% sched_debug.cpu#83.nr_load_updates
449125 Â 4% -46.8% 239047 Â 38% sched_debug.cpu#83.sched_count
215386 Â 4% -66.1% 72964 Â 28% sched_debug.cpu#83.sched_goidle
385918 Â 6% +136.5% 912662 Â 5% sched_debug.cpu#83.avg_idle
442478 Â 4% -47.3% 233229 Â 37% sched_debug.cpu#83.nr_switches
449876 Â 4% -47.3% 236952 Â 37% sched_debug.cpu#84.sched_count
443343 Â 4% -47.6% 232453 Â 37% sched_debug.cpu#84.nr_switches
159522 Â 2% -51.7% 77037 Â 27% sched_debug.cpu#84.nr_load_updates
215677 Â 4% -66.3% 72592 Â 29% sched_debug.cpu#84.sched_goidle
364262 Â 13% +148.5% 905299 Â 5% sched_debug.cpu#84.avg_idle
444975 Â 3% -48.1% 230920 Â 37% sched_debug.cpu#85.nr_switches
216569 Â 3% -66.8% 71874 Â 29% sched_debug.cpu#85.sched_goidle
451627 Â 3% -48.0% 234975 Â 37% sched_debug.cpu#85.sched_count
387650 Â 7% +129.1% 888267 Â 7% sched_debug.cpu#85.avg_idle
159827 Â 2% -51.9% 76809 Â 27% sched_debug.cpu#85.nr_load_updates
398952 Â 16% +120.7% 880379 Â 7% sched_debug.cpu#86.avg_idle
447181 Â 3% -48.3% 231357 Â 37% sched_debug.cpu#86.nr_switches
159916 Â 2% -51.8% 77000 Â 27% sched_debug.cpu#86.nr_load_updates
453947 Â 3% -48.1% 235643 Â 37% sched_debug.cpu#86.sched_count
217599 Â 3% -67.1% 71651 Â 28% sched_debug.cpu#86.sched_goidle
160324 Â 2% -52.1% 76764 Â 27% sched_debug.cpu#87.nr_load_updates
374310 Â 6% +138.1% 891320 Â 6% sched_debug.cpu#87.avg_idle
220101 Â 3% -67.3% 72026 Â 28% sched_debug.cpu#87.sched_goidle
458878 Â 3% -48.7% 235540 Â 37% sched_debug.cpu#87.sched_count
452233 Â 3% -48.8% 231554 Â 37% sched_debug.cpu#87.nr_switches
160998 Â 1% -52.2% 76976 Â 27% sched_debug.cpu#88.nr_load_updates
388490 Â 10% +133.5% 907200 Â 6% sched_debug.cpu#88.avg_idle
464167 Â 3% -49.3% 235524 Â 37% sched_debug.cpu#88.sched_count
222658 Â 2% -67.8% 71589 Â 28% sched_debug.cpu#88.sched_goidle
457358 Â 3% -49.5% 230996 Â 37% sched_debug.cpu#88.nr_switches
373956 Â 11% +142.8% 908123 Â 4% sched_debug.cpu#89.avg_idle
227255 Â 2% -68.5% 71532 Â 29% sched_debug.cpu#89.sched_goidle
466727 Â 2% -50.5% 231136 Â 38% sched_debug.cpu#89.nr_switches
161927 Â 1% -52.7% 76598 Â 28% sched_debug.cpu#89.nr_load_updates
473645 Â 2% -50.3% 235205 Â 38% sched_debug.cpu#89.sched_count
1081031 Â 3% +25.0% 1351582 Â 4% sched_debug.cpu#9.sched_count
0 Â 0% +Inf% 17 Â 47% sched_debug.cpu#9.load
402463 Â 4% +36.8% 550573 Â 9% sched_debug.cpu#9.ttwu_count
518725 Â 3% +17.5% 609468 Â 4% sched_debug.cpu#9.sched_goidle
1058429 Â 3% +24.5% 1317308 Â 4% sched_debug.cpu#9.nr_switches
4530 Â 14% +118.6% 9905 Â 28% sched_debug.cpu#9.ttwu_local
330519 Â 11% +154.1% 839727 Â 6% sched_debug.cpu#90.avg_idle
247920 Â 7% -23.5% 189659 Â 21% sched_debug.cpu#90.ttwu_count
11641 Â 14% +310.1% 47738 Â 37% sched_debug.cpu#90.nr_uninterruptible
12 Â 26% -86.0% 1 Â 47% sched_debug.cpu#90.cpu_load[2]
9 Â 27% -84.2% 1 Â 33% sched_debug.cpu#90.cpu_load[3]
236121 Â 4% -62.6% 88204 Â 16% sched_debug.cpu#90.sched_goidle
492698 Â 4% -39.5% 297909 Â 20% sched_debug.cpu#90.sched_count
484160 Â 5% -39.8% 291626 Â 20% sched_debug.cpu#90.nr_switches
164750 Â 1% -45.4% 89920 Â 12% sched_debug.cpu#90.nr_load_updates
13 Â 36% -80.0% 2 Â 30% sched_debug.cpu#90.cpu_load[1]
2380 Â 22% +161.2% 6218 Â 25% sched_debug.cpu#90.ttwu_local
10 Â 22% -78.0% 2 Â 19% sched_debug.cpu#91.cpu_load[2]
6 Â 17% -84.6% 1 Â 0% sched_debug.cpu#91.cpu_load[4]
1958 Â 11% +216.4% 6195 Â 25% sched_debug.cpu#91.ttwu_local
466743 Â 4% -37.1% 293473 Â 20% sched_debug.cpu#91.nr_switches
11129 Â 13% +339.4% 48904 Â 38% sched_debug.cpu#91.nr_uninterruptible
11 Â 37% -73.3% 3 Â 0% sched_debug.cpu#91.cpu_load[1]
161859 Â 2% -44.3% 90199 Â 12% sched_debug.cpu#91.nr_load_updates
227465 Â 4% -61.2% 88334 Â 16% sched_debug.cpu#91.sched_goidle
8 Â 23% -78.1% 1 Â 24% sched_debug.cpu#91.cpu_load[3]
473602 Â 4% -36.6% 300206 Â 20% sched_debug.cpu#91.sched_count
365768 Â 8% +131.9% 848199 Â 5% sched_debug.cpu#91.avg_idle
464831 Â 5% -35.8% 298566 Â 20% sched_debug.cpu#92.sched_count
1909 Â 12% +225.2% 6209 Â 25% sched_debug.cpu#92.ttwu_local
6 Â 41% -84.0% 1 Â 0% sched_debug.cpu#92.cpu_load[4]
10722 Â 14% +354.4% 48718 Â 37% sched_debug.cpu#92.nr_uninterruptible
223142 Â 5% -60.5% 88183 Â 16% sched_debug.cpu#92.sched_goidle
160290 Â 2% -43.9% 89915 Â 12% sched_debug.cpu#92.nr_load_updates
379115 Â 7% +132.0% 879627 Â 2% sched_debug.cpu#92.avg_idle
458041 Â 5% -36.1% 292684 Â 20% sched_debug.cpu#92.nr_switches
159097 Â 2% -43.6% 89715 Â 12% sched_debug.cpu#93.nr_load_updates
363321 Â 9% +135.0% 853706 Â 4% sched_debug.cpu#93.avg_idle
9 Â 38% -66.7% 3 Â 13% sched_debug.cpu#93.cpu_load[1]
449508 Â 5% -35.0% 291986 Â 20% sched_debug.cpu#93.nr_switches
218847 Â 5% -59.9% 87722 Â 16% sched_debug.cpu#93.sched_goidle
456189 Â 5% -34.6% 298447 Â 20% sched_debug.cpu#93.sched_count
9 Â 20% -73.0% 2 Â 20% sched_debug.cpu#93.cpu_load[2]
8 Â 8% -78.1% 1 Â 24% sched_debug.cpu#93.cpu_load[3]
10526 Â 14% +364.2% 48867 Â 36% sched_debug.cpu#93.nr_uninterruptible
6 Â 7% -80.8% 1 Â 34% sched_debug.cpu#93.cpu_load[4]
1885 Â 12% +225.2% 6131 Â 25% sched_debug.cpu#93.ttwu_local
215555 Â 5% -59.0% 88450 Â 16% sched_debug.cpu#94.sched_goidle
157842 Â 2% -43.0% 90011 Â 12% sched_debug.cpu#94.nr_load_updates
10314 Â 15% +383.2% 49839 Â 37% sched_debug.cpu#94.nr_uninterruptible
373165 Â 12% +128.3% 851916 Â 2% sched_debug.cpu#94.avg_idle
2 Â 0% +87.5% 3 Â 22% sched_debug.cpu#94.cpu_load[0]
443064 Â 5% -33.5% 294760 Â 20% sched_debug.cpu#94.nr_switches
1869 Â 13% +231.5% 6198 Â 23% sched_debug.cpu#94.ttwu_local
449617 Â 5% -33.0% 301360 Â 20% sched_debug.cpu#94.sched_count
10349 Â 13% +364.1% 48028 Â 38% sched_debug.cpu#95.nr_uninterruptible
157490 Â 2% -43.3% 89361 Â 12% sched_debug.cpu#95.nr_load_updates
447749 Â 5% -33.6% 297481 Â 20% sched_debug.cpu#95.sched_count
440550 Â 5% -33.9% 291184 Â 20% sched_debug.cpu#95.nr_switches
380790 Â 12% +129.2% 872691 Â 4% sched_debug.cpu#95.avg_idle
7 Â 40% -83.9% 1 Â 34% sched_debug.cpu#95.cpu_load[3]
214335 Â 5% -59.1% 87594 Â 16% sched_debug.cpu#95.sched_goidle
1843 Â 12% +243.4% 6328 Â 22% sched_debug.cpu#95.ttwu_local
10609 Â 12% +352.8% 48034 Â 35% sched_debug.cpu#96.nr_uninterruptible
220349 Â 4% -60.3% 87478 Â 15% sched_debug.cpu#96.sched_goidle
459952 Â 4% -35.5% 296579 Â 19% sched_debug.cpu#96.sched_count
382794 Â 8% +121.5% 847879 Â 3% sched_debug.cpu#96.avg_idle
8 Â 40% -72.7% 2 Â 19% sched_debug.cpu#96.cpu_load[2]
159883 Â 1% -44.1% 89417 Â 12% sched_debug.cpu#96.nr_load_updates
452633 Â 4% -35.8% 290522 Â 19% sched_debug.cpu#96.nr_switches
1927 Â 13% +223.1% 6226 Â 23% sched_debug.cpu#96.ttwu_local
6 Â 36% -77.8% 1 Â 33% sched_debug.cpu#96.cpu_load[3]
158918 Â 1% -43.8% 89247 Â 12% sched_debug.cpu#97.nr_load_updates
7 Â 27% -83.3% 1 Â 34% sched_debug.cpu#97.cpu_load[4]
371984 Â 10% +126.9% 843866 Â 4% sched_debug.cpu#97.avg_idle
9 Â 42% -81.6% 1 Â 24% sched_debug.cpu#97.cpu_load[3]
1857 Â 11% +231.5% 6158 Â 24% sched_debug.cpu#97.ttwu_local
454841 Â 4% -34.8% 296555 Â 20% sched_debug.cpu#97.sched_count
448082 Â 4% -35.1% 290591 Â 20% sched_debug.cpu#97.nr_switches
218092 Â 4% -59.9% 87476 Â 16% sched_debug.cpu#97.sched_goidle
10490 Â 13% +357.9% 48042 Â 37% sched_debug.cpu#97.nr_uninterruptible
8 Â 26% -85.3% 1 Â 34% sched_debug.cpu#98.cpu_load[3]
363336 Â 8% +139.6% 870418 Â 2% sched_debug.cpu#98.avg_idle
217716 Â 4% -59.6% 88025 Â 15% sched_debug.cpu#98.sched_goidle
6 Â 25% -84.6% 1 Â 0% sched_debug.cpu#98.cpu_load[4]
454322 Â 4% -34.0% 299712 Â 19% sched_debug.cpu#98.sched_count
1891 Â 9% +231.0% 6262 Â 25% sched_debug.cpu#98.ttwu_local
447408 Â 4% -34.5% 293275 Â 20% sched_debug.cpu#98.nr_switches
10 Â 36% -80.5% 2 Â 0% sched_debug.cpu#98.cpu_load[2]
158876 Â 1% -43.4% 89862 Â 12% sched_debug.cpu#98.nr_load_updates
10482 Â 13% +370.9% 49366 Â 38% sched_debug.cpu#98.nr_uninterruptible
10469 Â 14% +370.8% 49296 Â 38% sched_debug.cpu#99.nr_uninterruptible
455622 Â 4% -34.3% 299384 Â 20% sched_debug.cpu#99.sched_count
218527 Â 4% -59.7% 88000 Â 16% sched_debug.cpu#99.sched_goidle
448998 Â 4% -34.6% 293470 Â 20% sched_debug.cpu#99.nr_switches
1912 Â 12% +226.2% 6237 Â 25% sched_debug.cpu#99.ttwu_local
379179 Â 11% +115.2% 815881 Â 3% sched_debug.cpu#99.avg_idle
9 Â 49% -81.1% 1 Â 24% sched_debug.cpu#99.cpu_load[3]
158830 Â 1% -43.5% 89753 Â 12% sched_debug.cpu#99.nr_load_updates
6 Â 39% -81.5% 1 Â 34% sched_debug.cpu#99.cpu_load[4]

brickland3: Brickland Ivy Bridge-EX
Memory: 512G




aim7.time.involuntary_context_switches

1.4e+06 ++----------------------------------------------------------------+
O |
1.2e+06 ++ O |
| O O O |
1e+06 ++ |
| |
800000 ++ |
| |
600000 ++ |
| |
400000 ++ O |
| O O O
200000 ++ |
| ....*
0 *+------*--------*-------*-------*-------*--------*-------*-------+


cpuidle.POLL.usage

300 ++--------------------------------------------------------------------+
| |
250 ++ ..*.... |
| .... ... |
*.. . ....*...... |
200 ++ *... .. ....*........*....... |
| *.... *........*
150 ++ |
| |
100 ++ |
| O O O
| O O |
50 O+ O |
| O O |
0 ++--------------------------------------------------------------------+


cpuidle.C1-IVT-4S.usage

2e+07 ++----------------------------------------------------------------+
1.8e+07 ++ O O |
| |
1.6e+07 ++ O O
1.4e+07 ++ |
O O |
1.2e+07 ++ O O O |
1e+07 ++ |
8e+06 ++ |
| |
6e+06 ++ |
4e+06 ++ ....*
| ....*.......*.......*.......*........*.......*... |
2e+06 *+......*.... |
0 ++----------------------------------------------------------------+


numa-vmstat.node1.numa_other

85000 ++-------------------------------------------------O----------------+
O O O O O O O O
80000 ++ |
| |
75000 ++ |
| |
70000 ++ |
| |
65000 ++ |
| |
60000 ++ |
| |
55000 ++ |
*........*.......*........*.......*........*.......*........*.......*
50000 ++------------------------------------------------------------------+


numa-vmstat.node2.numa_other

85000 ++------------------------------------------------------------------+
*........*.......*........*.......*........*.......*........*.......*
80000 ++ |
| |
75000 ++ |
| |
70000 ++ |
| |
65000 ++ |
| |
60000 ++ |
| |
55000 ++ |
O O O O O O O O O
50000 ++------------------------------------------------------------------+


proc-vmstat.nr_anon_pages

56000 ++------------------------------------------------------------------+
*........*..... ....*... |
54000 ++ .. ....*.......*.... ... ..*
52000 ++ *.... . .... |
| *........*. |
50000 ++ |
48000 ++ |
| |
46000 ++ |
44000 ++ O O O O
| |
42000 ++ |
40000 O+ O O O |
| O |
38000 ++------------------------------------------------------------------+


meminfo.AnonPages

220000 *+-----------------------------------------------------------------+
| *........ ....*.......*..... ..*
210000 ++ *.......*.... .. .... |
| *........*. |
200000 ++ |
| |
190000 ++ |
| |
180000 ++ |
| O O O O
170000 ++ |
O |
160000 ++ O O O O |
| |
150000 ++-----------------------------------------------------------------+


slabinfo.mm_struct.active_objs

11000 ++------------------------------------------------------------------+
| |
10500 ++ O |
O O O |
| |
10000 ++ O |
| |
9500 ++ O |
| O O O
9000 ++ |
| |
| |
8500 *+.......*.......*........*.......*........*.......*........*.......*
| |
8000 ++------------------------------------------------------------------+


slabinfo.mm_struct.num_objs

11000 ++------------------------------------------------------------------+
| O |
O O |
10500 ++ O |
| |
| O |
10000 ++ |
| |
9500 ++ O O |
| O O
| |
9000 ++ |
| |
*........*....... ....*........*.......*........*.......|
8500 ++---------------*--------*-----------------------------------------*


slabinfo.kmalloc-128.active_objs

100000 ++-----------------------------------------------------------------+
O O O |
90000 ++ O O |
| |
80000 ++ O O O
| O |
70000 ++ |
| |
60000 ++ |
| |
50000 ++ |
| ....*
40000 *+...... ....*....... ....*....... ....*........*... |
| *.... *.... *... |
30000 ++-----------------------------------------------------------------+


slabinfo.kmalloc-128.num_objs

100000 ++-----------------------------------------------------------------+
O O O O |
90000 ++ O |
| |
80000 ++ O O O
| O |
70000 ++ |
| |
60000 ++ |
| |
50000 ++ |
| ....*
40000 *+...... ....*....... ....*....... ....*........*... |
| *.... *.... *... |
30000 ++-----------------------------------------------------------------+


slabinfo.kmalloc-128.active_slabs

1600 ++-------------------------------------------------------------------+
1500 ++ O O |
O O O |
1400 ++ |
1300 ++ O |
1200 ++ O O O
1100 ++ |
| |
1000 ++ |
900 ++ |
800 ++ |
700 ++ |
*........ ....*....... ....*....... ....*
600 ++ *.......*........*.... *.... *.... |
500 ++-------------------------------------------------------------------+


slabinfo.kmalloc-128.num_slabs

1600 ++-------------------------------------------------------------------+
1500 ++ O O |
O O O |
1400 ++ |
1300 ++ O |
1200 ++ O O O
1100 ++ |
| |
1000 ++ |
900 ++ |
800 ++ |
700 ++ |
*........ ....*....... ....*....... ....*
600 ++ *.......*........*.... *.... *.... |
500 ++-------------------------------------------------------------------+


[*] bisect-good sample
[O] bisect-bad sample

To reproduce:

apt-get install ruby
git clone git://git.kernel.org/pub/scm/linux/kernel/git/wfg/lkp-tests.git
cd lkp-tests
bin/setup-local job.yaml # the job file attached in this email
bin/run-local job.yaml


Disclaimer:
Results have been estimated based on internal Intel analysis and are provided
for informational purposes only. Any difference in system hardware or software
design or configuration may affect actual performance.


Thanks,
Ying Huang

---
# LKP (Linux Kernel Performance) job specification for this aim7 run.
# NOTE(review): nesting indentation appears to have been stripped when this
# job file was quoted into the report; keys such as "wait:" and "interval:"
# are presumably children of the entries above them — confirm against the
# job.yaml attached to the original e-mail before reusing this text verbatim.
testcase: aim7
# Monitors sampled while the benchmark runs (source of the vmstat/meminfo/
# cpuidle/sched_debug columns in the comparison table above).
default-monitors:
wait: pre-test
uptime:
iostat:
vmstat:
numa-numastat:
numa-vmstat:
numa-meminfo:
proc-vmstat:
proc-stat:
interval: 10
meminfo:
slabinfo:
interrupts:
lock_stat:
latency_stats:
softirqs:
bdi_dev_mapping:
diskstats:
nfsstat:
cpuidle:
cpufreq-stats:
turbostat:
pmeter:
sched_debug:
interval: 60
default-watchdogs:
watch-oom:
watchdog:
# Test-box configuration (brickland3: 120-CPU Ivy Bridge-EX, 512G RAM).
cpufreq_governor: performance
model: Brickland Ivy Bridge-EX
nr_cpu: 120
memory: 512G
hdd_partitions:
swap_partitions:
pmeter_server: lkp-st01
pmeter_device: yokogawa-wt310
# Benchmark parameters: aim7 fork_test at a load of 2000 jobs.
aim7:
load: 2000
test: fork_test
# Kernel under test; note the tested commit here is the branch head
# (e479efd...), of which 2db34be8238 from the subject line is an ancestor.
branch: yuyang/for-lkp
commit: e479efd5dbb38db8f3b408adc9a194b29436fdc9
repeat_to: 2
testbox: brickland3
tbox_group: brickland3
kconfig: x86_64-rhel
enqueue_time: 2015-05-07 09:35:45.026776318 +08:00
user: lkp
queue: unit
compiler: gcc-4.9
head_commit: e479efd5dbb38db8f3b408adc9a194b29436fdc9
base_commit: b787f68c36d49bb1d9236f403813641efa74a031
kernel: "/pkg/linux/x86_64-rhel/gcc-4.9/e479efd5dbb38db8f3b408adc9a194b29436fdc9/vmlinuz-4.1.0-rc1-00004-ge479efd"
rootfs: debian-x86_64-2015-02-07.cgz
result_root: "/result/aim7/performance-2000-fork_test/brickland3/debian-x86_64-2015-02-07.cgz/x86_64-rhel/gcc-4.9/e479efd5dbb38db8f3b408adc9a194b29436fdc9/0"
LKP_SERVER: inn
job_file: "/lkp/scheduled/brickland3/unit_aim7-performance-2000-fork_test-x86_64-rhel-HEAD-e479efd5dbb38db8f3b408adc9a194b29436fdc9-1-20150507-28413-pppjch.yaml"
dequeue_time: 2015-05-07 11:40:43.629988684 +08:00
initrd: "/osimage/debian/debian-x86_64-2015-02-07.cgz"
# Kernel command line assembled by the LKP boot wrapper; the "|2-" entry is a
# YAML block scalar holding the multi-line console/watchdog parameters (no
# comments may be placed inside it — they would become literal content).
bootloader_append:
- root=/dev/ram0
- user=lkp
- job=/lkp/scheduled/brickland3/unit_aim7-performance-2000-fork_test-x86_64-rhel-HEAD-e479efd5dbb38db8f3b408adc9a194b29436fdc9-1-20150507-28413-pppjch.yaml
- ARCH=x86_64
- kconfig=x86_64-rhel
- branch=yuyang/for-lkp
- commit=e479efd5dbb38db8f3b408adc9a194b29436fdc9
- BOOT_IMAGE=/pkg/linux/x86_64-rhel/gcc-4.9/e479efd5dbb38db8f3b408adc9a194b29436fdc9/vmlinuz-4.1.0-rc1-00004-ge479efd
- RESULT_ROOT=/result/aim7/performance-2000-fork_test/brickland3/debian-x86_64-2015-02-07.cgz/x86_64-rhel/gcc-4.9/e479efd5dbb38db8f3b408adc9a194b29436fdc9/0
- LKP_SERVER=inn
- |2-


earlyprintk=ttyS0,115200
debug apic=debug sysrq_always_enabled rcupdate.rcu_cpu_stall_timeout=100
panic=-1 softlockup_panic=1 nmi_watchdog=panic oops=panic load_ramdisk=2 prompt_ramdisk=0
console=ttyS0,115200 console=tty0 vga=normal

rw
max_uptime: 1851.0600000000002
lkp_initrd: "/lkp/lkp/lkp-x86_64.cgz"
modules_initrd: "/pkg/linux/x86_64-rhel/gcc-4.9/e479efd5dbb38db8f3b408adc9a194b29436fdc9/modules.cgz"
bm_initrd: "/osimage/deps/debian-x86_64-2015-02-07.cgz/lkp.cgz,/osimage/deps/debian-x86_64-2015-02-07.cgz/run-ipconfig.cgz,/osimage/deps/debian-x86_64-2015-02-07.cgz/turbostat.cgz,/lkp/benchmarks/turbostat.cgz,/lkp/benchmarks/aim7-x86_64.cgz"
# Fields below were appended by the runner after the job completed.
job_state: finished
loadavg: 1451.96 1227.88 567.80 1/1116 100598
start_time: '1430970125'
end_time: '1430970463'
version: "/lkp/lkp/.src-20150507-111122"
# Pin every CPU's cpufreq governor to 'performance' so the benchmark runs
# at a fixed (maximum) frequency — removes frequency-scaling noise from the
# measurements. Replaces 120 hard-coded per-CPU lines with one glob loop:
# works for any CPU count, and the glob expands in the same lexical order
# (cpu0, cpu1, cpu10, cpu100, ...) as the original command sequence.
for governor in /sys/devices/system/cpu/cpu[0-9]*/cpufreq/scaling_governor; do
  # Skip CPUs without a cpufreq interface (and the unexpanded pattern
  # itself when no node matches) instead of erroring out.
  if [ -w "$governor" ]; then
    echo performance > "$governor"
  fi
done
_______________________________________________
LKP mailing list
LKP@xxxxxxxxxxxxxxx