@@ -2768,6 +2768,35 @@ intel_stop_scheduling(struct cpu_hw_events *cpuc)
 	raw_spin_unlock(&excl_cntrs->lock);
 }
 
+static struct event_constraint *
+dyn_constraint(struct cpu_hw_events *cpuc, struct event_constraint *c, int idx)
+{
+	WARN_ON_ONCE(!cpuc->constraint_list);
+
+	if (!(c->flags & PERF_X86_EVENT_DYNAMIC)) {
+		struct event_constraint *cx;
+
+		/*
+		 * grab pre-allocated constraint entry
+		 */
+		cx = &cpuc->constraint_list[idx];
+
+		/*
+		 * initialize dynamic constraint
+		 * with static constraint
+		 */
+		*cx = *c;
+
+		/*
+		 * mark constraint as dynamic
+		 */
+		cx->flags |= PERF_X86_EVENT_DYNAMIC;
+		c = cx;
+	}
+
+	return c;
+}
+
 static struct event_constraint *
 intel_get_excl_constraints(struct cpu_hw_events *cpuc, struct perf_event *event,
 			   int idx, struct event_constraint *c)
@@ -2798,27 +2827,7 @@ intel_get_excl_constraints(struct cpu_hw_events *cpuc, struct perf_event *event,
 	 * only needed when constraint has not yet
 	 * been cloned (marked dynamic)
 	 */
-	if (!(c->flags & PERF_X86_EVENT_DYNAMIC)) {
-		struct event_constraint *cx;
-
-		/*
-		 * grab pre-allocated constraint entry
-		 */
-		cx = &cpuc->constraint_list[idx];
-
-		/*
-		 * initialize dynamic constraint
-		 * with static constraint
-		 */
-		*cx = *c;
-
-		/*
-		 * mark constraint as dynamic, so we
-		 * can free it later on
-		 */
-		cx->flags |= PERF_X86_EVENT_DYNAMIC;
-		c = cx;
-	}
+	c = dyn_constraint(cpuc, c, idx);
 
 	/*
 	 * From here on, the constraint is dynamic.
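
For context, a minimal sketch of how a caller might reuse the factored-out dyn_constraint() helper, presumably the point of lifting it out of intel_get_excl_constraints(). The hook name example_get_event_constraints() and the counter-mask tweak below are illustrative assumptions, not part of this patch; dyn_constraint(), struct event_constraint, cpuc->constraint_list and intel_get_event_constraints() are the existing names in arch/x86/events/intel/core.c. The helper clones the shared static constraint into the per-CPU cpuc->constraint_list[idx] slot and marks it PERF_X86_EVENT_DYNAMIC, so the copy can be narrowed per event without touching the static constraint tables.

/*
 * Illustrative sketch only: the function name and the precise_ip-based
 * tweak are hypothetical, used to show the clone-before-modify pattern.
 */
static struct event_constraint *
example_get_event_constraints(struct cpu_hw_events *cpuc, int idx,
			      struct perf_event *event)
{
	struct event_constraint *c = intel_get_event_constraints(cpuc, idx, event);

	if (event->attr.precise_ip) {
		/* clone the static constraint into cpuc->constraint_list[idx] */
		c = dyn_constraint(cpuc, c, idx);
		/* the dynamic copy is private to this slot, safe to narrow */
		c->idxmsk64 &= ~BIT_ULL(0);	/* hypothetical: forbid counter 0 */
	}

	return c;
}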