[PATCH 2/3] x86/pmu: Replace X86_ALL_EVENT_FLAGS with INTEL_ALL_EVENT_FLAGS

From: Ravi Bangoria
Date: Mon Feb 21 2022 - 02:32:56 EST


X86_ALL_EVENT_FLAGS contains Intel-specific flags and is used only by
Intel-specific macros, i.e. it is not an x86-generic macro. Rename it
to INTEL_ALL_EVENT_FLAGS. No functional change.

Signed-off-by: Ravi Bangoria <ravi.bangoria@xxxxxxx>
---
arch/x86/events/intel/core.c | 2 +-
arch/x86/events/perf_event.h | 32 ++++++++++++++++----------------
arch/x86/include/asm/perf_event.h | 2 +-
3 files changed, 18 insertions(+), 18 deletions(-)
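
[Note for reviewers, not part of the commit message: a minimal sketch of
how the renamed mask is used, mirroring the intel_pmu_hw_config() hunk
below. The reject_filtered_config() helper name is hypothetical, and the
snippet assumes kernel context (u64 from <linux/types.h>, -EINVAL from
<linux/errno.h>).]

	/*
	 * Raw configs for events that support no filters must not set
	 * any of the filter bits (edge, inv, cmask, ...) covered by
	 * INTEL_ALL_EVENT_FLAGS.
	 */
	static int reject_filtered_config(u64 config)
	{
		if (config & INTEL_ALL_EVENT_FLAGS)
			return -EINVAL;
		return 0;
	}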

diff --git a/arch/x86/events/intel/core.c b/arch/x86/events/intel/core.c
index 9a72fd8ddab9..54aba01a23a6 100644
--- a/arch/x86/events/intel/core.c
+++ b/arch/x86/events/intel/core.c
@@ -3835,7 +3835,7 @@ static int intel_pmu_hw_config(struct perf_event *event)
* The TopDown metrics events and slots event don't
* support any filters.
*/
- if (event->attr.config & X86_ALL_EVENT_FLAGS)
+ if (event->attr.config & INTEL_ALL_EVENT_FLAGS)
return -EINVAL;

if (is_available_metric_event(event)) {
diff --git a/arch/x86/events/perf_event.h b/arch/x86/events/perf_event.h
index e789b390d90c..6bad5d4e6f17 100644
--- a/arch/x86/events/perf_event.h
+++ b/arch/x86/events/perf_event.h
@@ -439,86 +439,86 @@ struct cpu_hw_events {

/* Like UEVENT_CONSTRAINT, but match flags too */
#define INTEL_FLAGS_UEVENT_CONSTRAINT(c, n) \
- EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS)
+ EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS)

#define INTEL_EXCLUEVT_CONSTRAINT(c, n) \
__EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK, \
HWEIGHT(n), 0, PERF_X86_EVENT_EXCL)

#define INTEL_PLD_CONSTRAINT(c, n) \
- __EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ __EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_LDLAT)

#define INTEL_PSD_CONSTRAINT(c, n) \
- __EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ __EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_STLAT)

#define INTEL_PST_CONSTRAINT(c, n) \
- __EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ __EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_ST)

/* Event constraint, but match on all event flags too. */
#define INTEL_FLAGS_EVENT_CONSTRAINT(c, n) \
- EVENT_CONSTRAINT(c, n, ARCH_PERFMON_EVENTSEL_EVENT|X86_ALL_EVENT_FLAGS)
+ EVENT_CONSTRAINT(c, n, ARCH_PERFMON_EVENTSEL_EVENT|INTEL_ALL_EVENT_FLAGS)

#define INTEL_FLAGS_EVENT_CONSTRAINT_RANGE(c, e, n) \
- EVENT_CONSTRAINT_RANGE(c, e, n, ARCH_PERFMON_EVENTSEL_EVENT|X86_ALL_EVENT_FLAGS)
+ EVENT_CONSTRAINT_RANGE(c, e, n, ARCH_PERFMON_EVENTSEL_EVENT|INTEL_ALL_EVENT_FLAGS)

/* Check only flags, but allow all event/umask */
#define INTEL_ALL_EVENT_CONSTRAINT(code, n) \
- EVENT_CONSTRAINT(code, n, X86_ALL_EVENT_FLAGS)
+ EVENT_CONSTRAINT(code, n, INTEL_ALL_EVENT_FLAGS)

/* Check flags and event code, and set the HSW store flag */
#define INTEL_FLAGS_EVENT_CONSTRAINT_DATALA_ST(code, n) \
__EVENT_CONSTRAINT(code, n, \
- ARCH_PERFMON_EVENTSEL_EVENT|X86_ALL_EVENT_FLAGS, \
+ ARCH_PERFMON_EVENTSEL_EVENT|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_ST_HSW)

/* Check flags and event code, and set the HSW load flag */
#define INTEL_FLAGS_EVENT_CONSTRAINT_DATALA_LD(code, n) \
__EVENT_CONSTRAINT(code, n, \
- ARCH_PERFMON_EVENTSEL_EVENT|X86_ALL_EVENT_FLAGS, \
+ ARCH_PERFMON_EVENTSEL_EVENT|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_LD_HSW)

#define INTEL_FLAGS_EVENT_CONSTRAINT_DATALA_LD_RANGE(code, end, n) \
__EVENT_CONSTRAINT_RANGE(code, end, n, \
- ARCH_PERFMON_EVENTSEL_EVENT|X86_ALL_EVENT_FLAGS, \
+ ARCH_PERFMON_EVENTSEL_EVENT|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_LD_HSW)

#define INTEL_FLAGS_EVENT_CONSTRAINT_DATALA_XLD(code, n) \
__EVENT_CONSTRAINT(code, n, \
- ARCH_PERFMON_EVENTSEL_EVENT|X86_ALL_EVENT_FLAGS, \
+ ARCH_PERFMON_EVENTSEL_EVENT|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, \
PERF_X86_EVENT_PEBS_LD_HSW|PERF_X86_EVENT_EXCL)

/* Check flags and event code/umask, and set the HSW store flag */
#define INTEL_FLAGS_UEVENT_CONSTRAINT_DATALA_ST(code, n) \
__EVENT_CONSTRAINT(code, n, \
- INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_ST_HSW)

#define INTEL_FLAGS_UEVENT_CONSTRAINT_DATALA_XST(code, n) \
__EVENT_CONSTRAINT(code, n, \
- INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, \
PERF_X86_EVENT_PEBS_ST_HSW|PERF_X86_EVENT_EXCL)

/* Check flags and event code/umask, and set the HSW load flag */
#define INTEL_FLAGS_UEVENT_CONSTRAINT_DATALA_LD(code, n) \
__EVENT_CONSTRAINT(code, n, \
- INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_LD_HSW)

#define INTEL_FLAGS_UEVENT_CONSTRAINT_DATALA_XLD(code, n) \
__EVENT_CONSTRAINT(code, n, \
- INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, \
PERF_X86_EVENT_PEBS_LD_HSW|PERF_X86_EVENT_EXCL)

/* Check flags and event code/umask, and set the HSW N/A flag */
#define INTEL_FLAGS_UEVENT_CONSTRAINT_DATALA_NA(code, n) \
__EVENT_CONSTRAINT(code, n, \
- INTEL_ARCH_EVENT_MASK|X86_ALL_EVENT_FLAGS, \
+ INTEL_ARCH_EVENT_MASK|INTEL_ALL_EVENT_FLAGS, \
HWEIGHT(n), 0, PERF_X86_EVENT_PEBS_NA_HSW)


diff --git a/arch/x86/include/asm/perf_event.h b/arch/x86/include/asm/perf_event.h
index 002e67661330..216173a82ccc 100644
--- a/arch/x86/include/asm/perf_event.h
+++ b/arch/x86/include/asm/perf_event.h
@@ -73,7 +73,7 @@
ARCH_PERFMON_EVENTSEL_EDGE | \
ARCH_PERFMON_EVENTSEL_INV | \
ARCH_PERFMON_EVENTSEL_CMASK)
-#define X86_ALL_EVENT_FLAGS \
+#define INTEL_ALL_EVENT_FLAGS \
(ARCH_PERFMON_EVENTSEL_EDGE | \
ARCH_PERFMON_EVENTSEL_INV | \
ARCH_PERFMON_EVENTSEL_CMASK | \
--
2.27.0