x86, msr: execute on the correct CPU subset
author Borislav Petkov <borislav.petkov@amd.com>
Thu, 30 Jul 2009 09:10:01 +0000 (11:10 +0200)
committer H. Peter Anvin <hpa@zytor.com>
Mon, 3 Aug 2009 21:48:13 +0000 (14:48 -0700)
Make rdmsr_on_cpus/wrmsr_on_cpus execute on the current CPU only if that
CPU is set in the supplied bitmask.

Signed-off-by: Borislav Petkov <borislav.petkov@amd.com>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
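
For illustration, a minimal caller-side sketch of the semantics this change
affects. The function name, the choice of MSR_IA32_MCG_CTL, and the per-CPU
indexing of the msrs[] result array are assumptions of the sketch, not taken
from this patch:

	/*
	 * Illustrative sketch only -- not part of this patch. Assumes msrs[]
	 * has nr_cpu_ids slots and that the result for each CPU in @mask
	 * lands at that CPU's index; the MSR chosen is arbitrary.
	 */
	#include <linux/cpumask.h>
	#include <linux/slab.h>
	#include <asm/msr.h>

	static int dump_mcg_ctl(const cpumask_t *mask)
	{
		struct msr *msrs;
		int cpu;

		msrs = kcalloc(nr_cpu_ids, sizeof(*msrs), GFP_KERNEL);
		if (!msrs)
			return -ENOMEM;

		/*
		 * Read MSR_IA32_MCG_CTL on every CPU in @mask. With this
		 * change, the calling CPU's value is read only if the calling
		 * CPU is set in @mask, instead of unconditionally as before.
		 */
		rdmsr_on_cpus(mask, MSR_IA32_MCG_CTL, msrs);

		for_each_cpu(cpu, mask)
			pr_info("CPU%d: MCG_CTL = %08x%08x\n",
				cpu, msrs[cpu].h, msrs[cpu].l);

		kfree(msrs);
		return 0;
	}
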
arch/x86/lib/msr.c

index 1440b9c0547e9bd668a7bedf683686c57f5d1db0..caa24aca8115707be495a026fe89f20baea5164a 100644
@@ -89,16 +89,13 @@ void rdmsr_on_cpus(const cpumask_t *mask, u32 msr_no, struct msr *msrs)
        rv.msrs   = msrs;
        rv.msr_no = msr_no;
 
-       preempt_disable();
-       /*
-        * FIXME: handle the CPU we're executing on separately for now until
-        * smp_call_function_many has been fixed to not skip it.
-        */
-       this_cpu = raw_smp_processor_id();
-       smp_call_function_single(this_cpu, __rdmsr_on_cpu, &rv, 1);
+       this_cpu = get_cpu();
+
+       if (cpumask_test_cpu(this_cpu, mask))
+               __rdmsr_on_cpu(&rv);
 
        smp_call_function_many(mask, __rdmsr_on_cpu, &rv, 1);
-       preempt_enable();
+       put_cpu();
 }
 EXPORT_SYMBOL(rdmsr_on_cpus);
 
@@ -121,16 +118,13 @@ void wrmsr_on_cpus(const cpumask_t *mask, u32 msr_no, struct msr *msrs)
        rv.msrs   = msrs;
        rv.msr_no = msr_no;
 
-       preempt_disable();
-       /*
-        * FIXME: handle the CPU we're executing on separately for now until
-        * smp_call_function_many has been fixed to not skip it.
-        */
-       this_cpu = raw_smp_processor_id();
-       smp_call_function_single(this_cpu, __wrmsr_on_cpu, &rv, 1);
+       this_cpu = get_cpu();
+
+       if (cpumask_test_cpu(this_cpu, mask))
+               __wrmsr_on_cpu(&rv);
 
        smp_call_function_many(mask, __wrmsr_on_cpu, &rv, 1);
-       preempt_enable();
+       put_cpu();
 }
 EXPORT_SYMBOL(wrmsr_on_cpus);
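
The preempt_disable()/raw_smp_processor_id() pair is replaced by get_cpu(),
which disables preemption and returns the current CPU number, so the
cpumask_test_cpu() check and the direct __rdmsr_on_cpu()/__wrmsr_on_cpu()
call cannot race with migration; smp_call_function_many() then handles the
remote CPUs (it skips the calling CPU, which is why the explicit local call
is needed at all), and put_cpu() re-enables preemption. A minimal sketch of
the same pattern, with an illustrative do_work() callback standing in for
the MSR helpers:

	#include <linux/smp.h>		/* get_cpu(), put_cpu(), smp_call_function_many() */
	#include <linux/cpumask.h>

	/*
	 * Illustrative only: run do_work(info) on every CPU in @mask,
	 * including the calling CPU but only when it is actually set in @mask.
	 */
	static void run_on_cpus(const struct cpumask *mask,
				void (*do_work)(void *), void *info)
	{
		int this_cpu = get_cpu();	/* disables preemption, returns CPU id */

		if (cpumask_test_cpu(this_cpu, mask))
			do_work(info);		/* local CPU: call directly */

		/*
		 * smp_call_function_many() skips the calling CPU; wait=1 makes
		 * it return only after the remote CPUs in @mask are done.
		 */
		smp_call_function_many(mask, do_work, info, 1);

		put_cpu();			/* re-enables preemption */
	}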