Commit 63213196fd4b05b9c3539cbe34775c60f1f6fad0

Authored by David S. Miller
1 parent 7a883eaf62

[SPARC64]: Add missing dma_sync_single_range_for_*().

Reported by Andrew Morton.

Signed-off-by: David S. Miller <davem@davemloft.net>

Showing 1 changed file with 19 additions and 0 deletions

include/asm-sparc64/dma-mapping.h
 #ifndef _ASM_SPARC64_DMA_MAPPING_H
 #define _ASM_SPARC64_DMA_MAPPING_H
 
 #include <linux/scatterlist.h>
 #include <linux/mm.h>
 
 #define DMA_ERROR_CODE	(~(dma_addr_t)0x0)
 
 struct dma_ops {
 	void *(*alloc_coherent)(struct device *dev, size_t size,
 				dma_addr_t *dma_handle, gfp_t flag);
 	void (*free_coherent)(struct device *dev, size_t size,
 			      void *cpu_addr, dma_addr_t dma_handle);
 	dma_addr_t (*map_single)(struct device *dev, void *cpu_addr,
 				 size_t size,
 				 enum dma_data_direction direction);
 	void (*unmap_single)(struct device *dev, dma_addr_t dma_addr,
 			     size_t size,
 			     enum dma_data_direction direction);
 	int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents,
 		      enum dma_data_direction direction);
 	void (*unmap_sg)(struct device *dev, struct scatterlist *sg,
 			 int nhwentries,
 			 enum dma_data_direction direction);
 	void (*sync_single_for_cpu)(struct device *dev,
 				    dma_addr_t dma_handle, size_t size,
 				    enum dma_data_direction direction);
 	void (*sync_single_for_device)(struct device *dev,
 				       dma_addr_t dma_handle, size_t size,
 				       enum dma_data_direction direction);
 	void (*sync_sg_for_cpu)(struct device *dev, struct scatterlist *sg,
 				int nelems,
 				enum dma_data_direction direction);
 	void (*sync_sg_for_device)(struct device *dev, struct scatterlist *sg,
 				   int nelems,
 				   enum dma_data_direction direction);
 };
 extern const struct dma_ops *dma_ops;
 
 extern int dma_supported(struct device *dev, u64 mask);
 extern int dma_set_mask(struct device *dev, u64 dma_mask);
 
 static inline void *dma_alloc_coherent(struct device *dev, size_t size,
 				       dma_addr_t *dma_handle, gfp_t flag)
 {
 	return dma_ops->alloc_coherent(dev, size, dma_handle, flag);
 }
 
 static inline void dma_free_coherent(struct device *dev, size_t size,
 				     void *cpu_addr, dma_addr_t dma_handle)
 {
 	dma_ops->free_coherent(dev, size, cpu_addr, dma_handle);
 }
 
 static inline dma_addr_t dma_map_single(struct device *dev, void *cpu_addr,
 					size_t size,
 					enum dma_data_direction direction)
 {
 	return dma_ops->map_single(dev, cpu_addr, size, direction);
 }
 
 static inline void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
 				    size_t size,
 				    enum dma_data_direction direction)
 {
 	dma_ops->unmap_single(dev, dma_addr, size, direction);
 }
 
 static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
 				      unsigned long offset, size_t size,
 				      enum dma_data_direction direction)
 {
 	return dma_ops->map_single(dev, page_address(page) + offset,
 				   size, direction);
 }
 
 static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
 				  size_t size,
 				  enum dma_data_direction direction)
 {
 	dma_ops->unmap_single(dev, dma_address, size, direction);
 }
 
 static inline int dma_map_sg(struct device *dev, struct scatterlist *sg,
 			     int nents, enum dma_data_direction direction)
 {
 	return dma_ops->map_sg(dev, sg, nents, direction);
 }
 
 static inline void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
 				int nents, enum dma_data_direction direction)
 {
 	dma_ops->unmap_sg(dev, sg, nents, direction);
 }
 
 static inline void dma_sync_single_for_cpu(struct device *dev,
 					   dma_addr_t dma_handle, size_t size,
 					   enum dma_data_direction direction)
 {
 	dma_ops->sync_single_for_cpu(dev, dma_handle, size, direction);
 }
 
 static inline void dma_sync_single_for_device(struct device *dev,
 					      dma_addr_t dma_handle,
 					      size_t size,
 					      enum dma_data_direction direction)
 {
 	dma_ops->sync_single_for_device(dev, dma_handle, size, direction);
 }
 
+static inline void dma_sync_single_range_for_cpu(struct device *dev,
+						 dma_addr_t dma_handle,
+						 unsigned long offset,
+						 size_t size,
+						 enum dma_data_direction direction)
+{
+	dma_sync_single_for_cpu(dev, dma_handle+offset, size, direction);
+}
+
+static inline void dma_sync_single_range_for_device(struct device *dev,
+						    dma_addr_t dma_handle,
+						    unsigned long offset,
+						    size_t size,
+						    enum dma_data_direction direction)
+{
+	dma_sync_single_for_device(dev, dma_handle+offset, size, direction);
+}
+
+
 static inline void dma_sync_sg_for_cpu(struct device *dev,
 				       struct scatterlist *sg, int nelems,
 				       enum dma_data_direction direction)
 {
 	dma_ops->sync_sg_for_cpu(dev, sg, nelems, direction);
 }
 
 static inline void dma_sync_sg_for_device(struct device *dev,
 					  struct scatterlist *sg, int nelems,
 					  enum dma_data_direction direction)
 {
 	dma_ops->sync_sg_for_device(dev, sg, nelems, direction);
 }
 
 static inline int dma_mapping_error(dma_addr_t dma_addr)
 {
 	return (dma_addr == DMA_ERROR_CODE);
 }
 
 static inline int dma_get_cache_alignment(void)
 {
 	/* no easy way to get cache size on all processors, so return
 	 * the maximum possible, to be safe */
 	return (1 << INTERNODE_CACHE_SHIFT);
 }
 
 #define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
 #define dma_free_noncoherent(d, s, v, h)  dma_free_coherent(d, s, v, h)
 #define dma_is_consistent(d, h)	(1)
 
 #endif /* _ASM_SPARC64_DMA_MAPPING_H */
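For context: as the diff shows, the new range variants simply add the caller's offset to the DMA handle and forward to the existing whole-mapping sync helpers. They let a driver make just a sub-range of a streaming mapping coherent, rather than syncing the entire buffer. Below is a minimal, hypothetical caller sketch; the names (example_poll_slot, ring_dma, SLOT_BYTES) are invented for illustration and are not part of this commit.

/* Hypothetical driver fragment: sync only the 64-byte slot the device
 * just wrote, instead of the whole ring buffer mapping.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>

#define SLOT_BYTES	64

static void example_poll_slot(struct device *dev, void *ring,
			      dma_addr_t ring_dma, int slot)
{
	unsigned long offset = slot * SLOT_BYTES;

	/* Give the CPU a coherent view of just this slot. */
	dma_sync_single_range_for_cpu(dev, ring_dma, offset, SLOT_BYTES,
				      DMA_FROM_DEVICE);

	/* ... inspect the slot contents at ring + offset ... */

	/* Hand the slot back to the device for reuse. */
	dma_sync_single_range_for_device(dev, ring_dma, offset, SLOT_BYTES,
					 DMA_FROM_DEVICE);
}

On sparc64 the two calls expand to dma_ops->sync_single_for_cpu()/sync_single_for_device() on the offset handle, matching the generic DMA API's dma_sync_single_range_for_*() signature (dev, handle, offset, size, direction) that other architectures already provided.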