Blame view
kernel/up.c
2.25 KB
457c89965 treewide: Add SPD... |
1 |
// SPDX-License-Identifier: GPL-2.0-only |
53ce3d956 smp_call_function... |
2 3 4 |
/* * Uniprocessor-only support functions. The counterpart to kernel/smp.c */ |
6e9628141 smp_call_function... |
5 |
#include <linux/interrupt.h> |
53ce3d956 smp_call_function... |
6 |
#include <linux/kernel.h> |
9984de1a5 kernel: Map most ... |
7 |
#include <linux/export.h> |
53ce3d956 smp_call_function... |
8 |
#include <linux/smp.h> |
47ae4b05d virt, sched: Add ... |
9 |
#include <linux/hypervisor.h> |
53ce3d956 smp_call_function... |
10 11 12 13 |
int smp_call_function_single(int cpu, void (*func) (void *info), void *info, int wait) { |
081192b25 up.c: use local_i... |
14 |
unsigned long flags; |
1e474b28e smp/up: Make smp_... |
15 16 |
if (cpu != 0) return -ENXIO; |
93423b866 smp_call_function... |
17 |
|
081192b25 up.c: use local_i... |
18 19 20 |
local_irq_save(flags); func(info); local_irq_restore(flags); |
93423b866 smp_call_function... |
21 |
|
53ce3d956 smp_call_function... |
22 23 24 |
return 0; } EXPORT_SYMBOL(smp_call_function_single); |
fa688207c smp: quit uncondi... |
25 |
|
966a96711 smp: Avoid using ... |
26 |
/*
 * "Asynchronously" run csd->func(csd->info) on @cpu.  On UP nothing is
 * actually asynchronous: the callback is executed immediately, with
 * interrupts disabled, and 0 is returned unconditionally.
 */
int smp_call_function_single_async(int cpu, call_single_data_t *csd)
{
	unsigned long irqflags;

	local_irq_save(irqflags);
	csd->func(csd->info);
	local_irq_restore(irqflags);

	return 0;
}
EXPORT_SYMBOL(smp_call_function_single_async);
40c01e8bd kernel: provide a... |
36 |
|
caa759323 smp: Remove smp_c... |
37 |
/*
 * Run @func on every CPU.  With exactly one CPU that is a single direct
 * call under local_irq_save(); @wait is irrelevant on UP.
 */
void on_each_cpu(smp_call_func_t func, void *info, int wait)
{
	unsigned long irqflags;

	local_irq_save(irqflags);
	func(info);
	local_irq_restore(irqflags);
}
EXPORT_SYMBOL(on_each_cpu);
fa688207c smp: quit uncondi... |
46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 |
/*
 * Note we still need to test the mask even for UP because we actually
 * can get an empty mask from code that on SMP might call us without the
 * local CPU in the mask.
 */
void on_each_cpu_mask(const struct cpumask *mask, smp_call_func_t func,
		      void *info, bool wait)
{
	unsigned long irqflags;

	/* Only act if CPU 0 (the sole CPU) is actually in the mask. */
	if (cpumask_test_cpu(0, mask)) {
		local_irq_save(irqflags);
		func(info);
		local_irq_restore(irqflags);
	}
}
EXPORT_SYMBOL(on_each_cpu_mask);

/*
 * Preemption is disabled here to make sure the cond_func is called under the
 * same conditions in UP and SMP.
 */
5671d814d smp: Use smp_cond... |
69 |
void on_each_cpu_cond_mask(smp_cond_func_t cond_func, smp_call_func_t func,
			   void *info, bool wait, const struct cpumask *mask)
{
	unsigned long irqflags;

	/*
	 * @mask is not consulted on UP: the predicate alone decides whether
	 * CPU 0 runs the callback.  Preemption is held off so cond_func()
	 * runs under the same conditions as in the SMP implementation.
	 */
	preempt_disable();
	if (cond_func(0, info)) {
		local_irq_save(irqflags);
		func(info);
		local_irq_restore(irqflags);
	}
	preempt_enable();
}
EXPORT_SYMBOL(on_each_cpu_cond_mask);
5671d814d smp: Use smp_cond... |
83 |
/* Convenience wrapper: on_each_cpu_cond_mask() over all CPUs (NULL mask). */
void on_each_cpu_cond(smp_cond_func_t cond_func, smp_call_func_t func,
		      void *info, bool wait)
{
	on_each_cpu_cond_mask(cond_func, func, info, wait, NULL);
}
EXPORT_SYMBOL(on_each_cpu_cond);
df8ce9d78 smp: Add function... |
89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 |
/*
 * Call @func with argument @par on @cpu and return its result.  On UP
 * only CPU 0 exists, so any other @cpu yields -ENXIO.  When @phys is
 * set, the hypervisor is asked to pin vCPU 0 to physical CPU 0 around
 * the call and to drop the pinning (-1) afterwards.
 */
int smp_call_on_cpu(unsigned int cpu, int (*func)(void *), void *par, bool phys)
{
	int retval;

	if (cpu != 0)
		return -ENXIO;

	if (phys)
		hypervisor_pin_vcpu(0);

	retval = func(par);

	if (phys)
		hypervisor_pin_vcpu(-1);

	return retval;
}
EXPORT_SYMBOL_GPL(smp_call_on_cpu);