Commit 0fb2a6f283f25731217841f961cefa0a19bd449d

Authored by Eli Billauer
Committed by Michal Simek
1 parent cf560c1801

microblaze: Added DMA sync operations

Added support for dma_direct_sync_single_for_*() and dma_direct_sync_sg_for_*()

Signed-off-by: Eli Billauer <eli.billauer@gmail.com>
Signed-off-by: Michal Simek <monstr@monstr.eu>

Showing 1 changed file with 60 additions and 0 deletions Side-by-side Diff

arch/microblaze/kernel/dma.c
... ... @@ -118,6 +118,62 @@
118 118 __dma_sync(dma_address, size, direction);
119 119 }
120 120  
  121 +static inline void
  122 +dma_direct_sync_single_for_cpu(struct device *dev,
  123 + dma_addr_t dma_handle, size_t size,
  124 + enum dma_data_direction direction)
  125 +{
  126 + /*
  127 + * It's pointless to flush the cache as the memory segment
  128 + * is given to the CPU
  129 + */
  130 +
  131 + if (direction == DMA_FROM_DEVICE)
  132 + __dma_sync(dma_handle, size, direction);
  133 +}
  134 +
  135 +static inline void
  136 +dma_direct_sync_single_for_device(struct device *dev,
  137 + dma_addr_t dma_handle, size_t size,
  138 + enum dma_data_direction direction)
  139 +{
  140 + /*
  141 + * It's pointless to invalidate the cache if the device isn't
  142 + * supposed to write to the relevant region
  143 + */
  144 +
  145 + if (direction == DMA_TO_DEVICE)
  146 + __dma_sync(dma_handle, size, direction);
  147 +}
  148 +
  149 +static inline void
  150 +dma_direct_sync_sg_for_cpu(struct device *dev,
  151 + struct scatterlist *sgl, int nents,
  152 + enum dma_data_direction direction)
  153 +{
  154 + struct scatterlist *sg;
  155 + int i;
  156 +
  157 + /* FIXME this part of code is untested */
  158 + if (direction == DMA_FROM_DEVICE)
  159 + for_each_sg(sgl, sg, nents, i)
  160 + __dma_sync(sg->dma_address, sg->length, direction);
  161 +}
  162 +
  163 +static inline void
  164 +dma_direct_sync_sg_for_device(struct device *dev,
  165 + struct scatterlist *sgl, int nents,
  166 + enum dma_data_direction direction)
  167 +{
  168 + struct scatterlist *sg;
  169 + int i;
  170 +
  171 + /* FIXME this part of code is untested */
  172 + if (direction == DMA_TO_DEVICE)
  173 + for_each_sg(sgl, sg, nents, i)
  174 + __dma_sync(sg->dma_address, sg->length, direction);
  175 +}
  176 +
121 177 struct dma_map_ops dma_direct_ops = {
122 178 .alloc_coherent = dma_direct_alloc_coherent,
123 179 .free_coherent = dma_direct_free_coherent,
... ... @@ -126,6 +182,10 @@
126 182 .dma_supported = dma_direct_dma_supported,
127 183 .map_page = dma_direct_map_page,
128 184 .unmap_page = dma_direct_unmap_page,
  185 + .sync_single_for_cpu = dma_direct_sync_single_for_cpu,
  186 + .sync_single_for_device = dma_direct_sync_single_for_device,
  187 + .sync_sg_for_cpu = dma_direct_sync_sg_for_cpu,
  188 + .sync_sg_for_device = dma_direct_sync_sg_for_device,
129 189 };
130 190 EXPORT_SYMBOL(dma_direct_ops);
131 191