Blame view

kernel/sched/idle_task.c 2.35 KB
b24413180   Greg Kroah-Hartman   License cleanup: ...
1
  // SPDX-License-Identifier: GPL-2.0
029632fbb   Peter Zijlstra   sched: Make separ...
2
  #include "sched.h"
fa72e9e48   Ingo Molnar   sched: cfs core, ...
3
4
5
6
  /*
   * idle-task scheduling class.
   *
   * (NOTE: these are not related to SCHED_IDLE tasks which are
489a71b02   Hiroshi Shimamoto   sched: Update doc...
7
   *  handled in sched/fair.c)
fa72e9e48   Ingo Molnar   sched: cfs core, ...
8
   */
e7693a362   Gregory Haskins   sched: de-SCHED_O...
9
  #ifdef CONFIG_SMP
0017d7350   Peter Zijlstra   sched: Fix TASK_W...
10
  static int
ac66f5477   Peter Zijlstra   sched/numa: Intro...
11
  select_task_rq_idle(struct task_struct *p, int cpu, int sd_flag, int flags)
e7693a362   Gregory Haskins   sched: de-SCHED_O...
12
13
14
15
  {
  	return task_cpu(p); /* IDLE tasks as never migrated */
  }
  #endif /* CONFIG_SMP */
38033c37f   Peter Zijlstra   sched: Push down ...
16

fa72e9e48   Ingo Molnar   sched: cfs core, ...
17
18
19
/*
 * Idle tasks are unconditionally rescheduled:
 */
static void check_preempt_curr_idle(struct rq *rq, struct task_struct *p, int flags)
{
	/*
	 * Any runnable task beats idle — simply mark the current (idle)
	 * task for rescheduling.
	 */
	resched_curr(rq);
}
606dba2e2   Peter Zijlstra   sched: Push put_p...
24
/*
 * Pick the per-CPU idle task as the next task to run.  Called by the
 * core scheduler when no higher class has a runnable task.
 */
static struct task_struct *
pick_next_task_idle(struct rq *rq, struct task_struct *prev, struct rq_flags *rf)
{
	/* Retire the previously running task before handing over to idle. */
	put_prev_task(rq, prev);
	/* NOTE(review): presumably updates SMT-core idle bookkeeping — see update_idle_core(). */
	update_idle_core(rq);
	/* Account one more "CPU went idle" event in the schedstats. */
	schedstat_inc(rq->sched_goidle);
	return rq->idle;
}
  
  /*
   * It is not legal to sleep in the idle task - print a warning
   * message if some code attempts to do it:
   */
  static void
371fd7e7a   Peter Zijlstra   sched: Add enqueu...
38
  dequeue_task_idle(struct rq *rq, struct task_struct *p, int flags)
fa72e9e48   Ingo Molnar   sched: cfs core, ...
39
  {
05fa785cf   Thomas Gleixner   sched: Convert rq...
40
  	raw_spin_unlock_irq(&rq->lock);
3df0fc5b2   Peter Zijlstra   sched: Restore pr...
41
42
  	printk(KERN_ERR "bad: scheduling from the idle thread!
  ");
fa72e9e48   Ingo Molnar   sched: cfs core, ...
43
  	dump_stack();
05fa785cf   Thomas Gleixner   sched: Convert rq...
44
  	raw_spin_lock_irq(&rq->lock);
fa72e9e48   Ingo Molnar   sched: cfs core, ...
45
  }
31ee529cc   Ingo Molnar   sched: remove the...
46
static void put_prev_task_idle(struct rq *rq, struct task_struct *prev)
{
	/*
	 * The idle task carries no run state worth saving; only the
	 * last-tick bookkeeping is reset — NOTE(review): presumably
	 * for the NO_HZ/tick code, see rq_last_tick_reset().
	 */
	rq_last_tick_reset(rq);
}
8f4d37ec0   Peter Zijlstra   sched: high-res p...
50
/* The idle task needs no per-tick processing — intentionally empty. */
static void task_tick_idle(struct rq *rq, struct task_struct *curr, int queued)
{
}
83b699ed2   Srivatsa Vaddagiri   sched: revert rec...
53
54
55
  static void set_curr_task_idle(struct rq *rq)
  {
  }
da7a735e5   Peter Zijlstra   sched: Fix switch...
56
static void switched_to_idle(struct rq *rq, struct task_struct *p)
{
	/* A task must never be switched to the idle class — hard bug. */
	BUG();
}
da7a735e5   Peter Zijlstra   sched: Fix switch...
60
61
static void
prio_changed_idle(struct rq *rq, struct task_struct *p, int oldprio)
{
	/* The idle task's priority never changes — reaching here is a bug. */
	BUG();
}
6d686f456   H Hartley Sweeten   sched: Don't expo...
65
  static unsigned int get_rr_interval_idle(struct rq *rq, struct task_struct *task)
0d721cead   Peter Williams   sched: Simplify s...
66
67
68
  {
  	return 0;
  }
90e362f4a   Thomas Gleixner   sched: Provide up...
69
70
71
  static void update_curr_idle(struct rq *rq)
  {
  }
fa72e9e48   Ingo Molnar   sched: cfs core, ...
72
73
74
/*
 * Simple, special scheduling class for the per-CPU idle tasks:
 * NOTE(review): member order follows the other sched_class tables in
 * kernel/sched/ — keep it in sync with struct sched_class.
 */
const struct sched_class idle_sched_class = {
	/* .next is NULL */
	/* no enqueue/yield_task for idle tasks */

	/* dequeue is not valid, we print a debug message there: */
	.dequeue_task		= dequeue_task_idle,

	.check_preempt_curr	= check_preempt_curr_idle,

	.pick_next_task		= pick_next_task_idle,
	.put_prev_task		= put_prev_task_idle,
#ifdef CONFIG_SMP
	.select_task_rq		= select_task_rq_idle,
	/* Generic helper — idle needs no special affinity handling. */
	.set_cpus_allowed	= set_cpus_allowed_common,
#endif

	.set_curr_task          = set_curr_task_idle,
	.task_tick		= task_tick_idle,

	.get_rr_interval	= get_rr_interval_idle,

	.prio_changed		= prio_changed_idle,
	.switched_to		= switched_to_idle,
	.update_curr		= update_curr_idle,
};