Blame view
fs/io-wq.h
3.59 KB
771b53d03 io-wq: small thre... |
1 2 3 4 5 6 7 |
#ifndef INTERNAL_IO_WQ_H
#define INTERNAL_IO_WQ_H

/* Opaque work-queue handle; definition lives in io-wq.c. */
struct io_wq;

/*
 * Bits stored in io_wq_work.flags.  IO_WQ_WORK_HASHED is tested by
 * io_wq_is_hashed() below and set by io_wq_hash_work(); the other bits'
 * exact semantics are defined by io-wq.c (names suggest cancel/unbound/
 * no-cancel/concurrent behavior — confirm there).
 */
enum {
	IO_WQ_WORK_CANCEL	= 1,
	IO_WQ_WORK_HASHED	= 2,
	IO_WQ_WORK_UNBOUND	= 4,
	IO_WQ_WORK_NO_CANCEL	= 8,
	IO_WQ_WORK_CONCURRENT	= 16,

	IO_WQ_HASH_SHIFT	= 24,	/* upper 8 bits are used for hash key */
};

/* Result of a cancellation attempt (io_wq_cancel_work()/io_wq_cancel_cb()). */
enum io_wq_cancel {
	IO_WQ_CANCEL_OK,	/* cancelled before started */
	IO_WQ_CANCEL_RUNNING,	/* found, running, and attempted cancelled */
	IO_WQ_CANCEL_NOTFOUND,	/* work not found */
};
/* Single link of an intrusive, singly-linked work list. */
struct io_wq_work_node {
	struct io_wq_work_node *next;
};

/*
 * Singly-linked list with a tail pointer, giving O(1) append
 * (see wq_list_add_tail() below).
 */
struct io_wq_work_list {
	struct io_wq_work_node *first;
	struct io_wq_work_node *last;
};
86f3cd1b5 io-wq: handle has... |
29 30 31 32 33 34 35 36 37 38 39 |
static inline void wq_list_add_after(struct io_wq_work_node *node, struct io_wq_work_node *pos, struct io_wq_work_list *list) { struct io_wq_work_node *next = pos->next; pos->next = node; node->next = next; if (!next) list->last = node; } |
6206f0e18 io-wq: shrink io_... |
40 41 42 43 |
static inline void wq_list_add_tail(struct io_wq_work_node *node, struct io_wq_work_list *list) { if (!list->first) { |
e995d5123 io-wq: briefly sp... |
44 45 |
list->last = node; WRITE_ONCE(list->first, node); |
6206f0e18 io-wq: shrink io_... |
46 47 48 49 50 |
} else { list->last->next = node; list->last = node; } } |
86f3cd1b5 io-wq: handle has... |
51 52 |
static inline void wq_list_cut(struct io_wq_work_list *list, struct io_wq_work_node *last, |
6206f0e18 io-wq: shrink io_... |
53 54 |
struct io_wq_work_node *prev) { |
86f3cd1b5 io-wq: handle has... |
55 56 57 58 59 60 61 |
/* first in the list, if prev==NULL */ if (!prev) WRITE_ONCE(list->first, last->next); else prev->next = last->next; if (last == list->last) |
6206f0e18 io-wq: shrink io_... |
62 |
list->last = prev; |
86f3cd1b5 io-wq: handle has... |
63 64 65 66 67 68 69 70 |
last->next = NULL; } static inline void wq_list_del(struct io_wq_work_list *list, struct io_wq_work_node *node, struct io_wq_work_node *prev) { wq_list_cut(list, node, prev); |
6206f0e18 io-wq: shrink io_... |
71 72 73 74 |
} #define wq_list_for_each(pos, prv, head) \ for (pos = (head)->first, prv = NULL; pos; prv = pos, pos = (pos)->next) |
e995d5123 io-wq: briefly sp... |
75 |
#define wq_list_empty(list) (READ_ONCE((list)->first) == NULL) |
6206f0e18 io-wq: shrink io_... |
76 77 78 79 |
#define INIT_WQ_LIST(list) do { \ (list)->first = NULL; \ (list)->last = NULL; \ } while (0) |
/* One unit of deferred work executed by an io-wq worker thread. */
struct io_wq_work {
	/* link into an io_wq_work_list (see wq_next_work()) */
	struct io_wq_work_node list;
	/*
	 * Task context presumably assumed by the worker while running this
	 * item (files/mm/creds/fs) — TODO confirm against io-wq.c.
	 */
	struct files_struct *files;
	struct mm_struct *mm;
	const struct cred *creds;
	struct fs_struct *fs;
	/* NOTE(review): looks like a file-size limit for the worker — confirm */
	unsigned long fsize;
	/* IO_WQ_WORK_* bits; upper 8 bits carry the hash key (IO_WQ_HASH_SHIFT) */
	unsigned flags;
};
86f3cd1b5 io-wq: handle has... |
89 90 91 92 93 94 95 |
static inline struct io_wq_work *wq_next_work(struct io_wq_work *work) { if (!work->list.next) return NULL; return container_of(work->list.next, struct io_wq_work, list); } |
/* Callback invoked when io-wq is done with a work item (frees/releases it). */
typedef void (free_work_fn)(struct io_wq_work *);
/* Work handler; returns the next work item to run, or NULL. */
typedef struct io_wq_work *(io_wq_work_fn)(struct io_wq_work *);

/* Creation parameters for io_wq_create(). */
struct io_wq_data {
	struct user_struct *user;

	io_wq_work_fn *do_work;
	free_work_fn *free_work;
};

struct io_wq *io_wq_create(unsigned bounded, struct io_wq_data *data);
/* Grab a reference to an existing wq if @data matches; false otherwise. */
bool io_wq_get(struct io_wq *wq, struct io_wq_data *data);
void io_wq_destroy(struct io_wq *wq);

void io_wq_enqueue(struct io_wq *wq, struct io_wq_work *work);
/* Tag @work with hash key @val; sets IO_WQ_WORK_HASHED (see io-wq.c). */
void io_wq_hash_work(struct io_wq_work *work, void *val);

/* True if @work was marked hashed via io_wq_hash_work(). */
static inline bool io_wq_is_hashed(struct io_wq_work *work)
{
	return (work->flags & IO_WQ_WORK_HASHED) != 0;
}
void io_wq_cancel_all(struct io_wq *wq);
enum io_wq_cancel io_wq_cancel_work(struct io_wq *wq, struct io_wq_work *cwork);

/* Predicate: return true if the given work item should be cancelled. */
typedef bool (work_cancel_fn)(struct io_wq_work *, void *);

/*
 * Cancel work matching @cancel(@work, @data); @cancel_all presumably
 * cancels every match rather than just the first — confirm in io-wq.c.
 */
enum io_wq_cancel io_wq_cancel_cb(struct io_wq *wq, work_cancel_fn *cancel,
					void *data, bool cancel_all);

struct task_struct *io_wq_get_task(struct io_wq *wq);
#if defined(CONFIG_IO_WQ)
/*
 * Worker sleep/wake hooks — presumably called from the scheduler when an
 * io-wq worker blocks or resumes; TODO confirm at the call sites.
 */
extern void io_wq_worker_sleeping(struct task_struct *);
extern void io_wq_worker_running(struct task_struct *);
#else
/* No-op stubs when io-wq is not built in. */
static inline void io_wq_worker_sleeping(struct task_struct *tsk)
{
}
static inline void io_wq_worker_running(struct task_struct *tsk)
{
}
#endif
525b305d6 io-wq: re-add io_... |
137 138 139 140 141 |
static inline bool io_wq_current_is_worker(void) { return in_task() && (current->flags & PF_IO_WORKER); } #endif |