/* dma.c: PCI and SBUS DMA accessors for 32-bit sparc.
 *
 * Copyright (C) 2008 David S. Miller <davem@davemloft.net>
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/mm.h>

#ifdef CONFIG_PCI
#include <linux/pci.h>
#endif

#include "dma.h"

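/*
 * Report whether @dev can perform DMA with the addressing @mask.
 * PCI devices defer to the PCI core; every other bus type (i.e. SBUS)
 * is reported as unsupported here.
 */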
int dma_supported(struct device *dev, u64 mask)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type)
                return pci_dma_supported(to_pci_dev(dev), mask);
#endif
        return 0;
}
EXPORT_SYMBOL(dma_supported);

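/*
 * Set the DMA addressing mask for @dev.  Only PCI devices can have
 * their mask changed; any other bus type gets -EOPNOTSUPP.
 */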
int dma_set_mask(struct device *dev, u64 dma_mask)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type)
                return pci_set_dma_mask(to_pci_dev(dev), dma_mask);
#endif
        return -EOPNOTSUPP;
}
EXPORT_SYMBOL(dma_set_mask);

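/*
 * Allocate a coherent (consistent) DMA buffer, dispatching to the PCI
 * or SBUS backend according to the device's bus type.
 */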
static void *dma32_alloc_coherent(struct device *dev, size_t size,
                                  dma_addr_t *dma_handle, gfp_t flag)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type)
                return pci_alloc_consistent(to_pci_dev(dev), size, dma_handle);
#endif
        return sbus_alloc_consistent(dev, size, dma_handle);
}

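/* Release a coherent buffer obtained from dma32_alloc_coherent(). */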
static void dma32_free_coherent(struct device *dev, size_t size,
                                void *cpu_addr, dma_addr_t dma_handle)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type) {
                pci_free_consistent(to_pci_dev(dev), size,
                                    cpu_addr, dma_handle);
                return;
        }
#endif
        sbus_free_consistent(dev, size, cpu_addr, dma_handle);
}

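/* Map one page for streaming DMA and return its bus address. */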
static dma_addr_t dma32_map_page(struct device *dev, struct page *page,
                                 unsigned long offset, size_t size,
                                 enum dma_data_direction direction,
                                 struct dma_attrs *attrs)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type)
                return pci_map_page(to_pci_dev(dev), page, offset,
                                    size, (int)direction);
#endif
        return sbus_map_page(dev, page, offset, size, (int)direction);
}

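/* Tear down a streaming mapping created by dma32_map_page(). */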
static void dma32_unmap_page(struct device *dev, dma_addr_t dma_address,
                             size_t size, enum dma_data_direction direction,
                             struct dma_attrs *attrs)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type) {
                pci_unmap_page(to_pci_dev(dev), dma_address,
                               size, (int)direction);
                return;
        }
#endif
        sbus_unmap_page(dev, dma_address, size, (int)direction);
}

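/*
 * Map a scatterlist for streaming DMA; returns the number of DMA
 * segments the backend actually used.
 */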
static int dma32_map_sg(struct device *dev, struct scatterlist *sg,
                        int nents, enum dma_data_direction direction,
                        struct dma_attrs *attrs)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type)
                return pci_map_sg(to_pci_dev(dev), sg, nents, (int)direction);
#endif
        return sbus_map_sg(dev, sg, nents, direction);
}

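/* Unmap a scatterlist previously mapped by dma32_map_sg(). */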
static void dma32_unmap_sg(struct device *dev, struct scatterlist *sg,
                           int nents, enum dma_data_direction direction,
                           struct dma_attrs *attrs)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type) {
                pci_unmap_sg(to_pci_dev(dev), sg, nents, (int)direction);
                return;
        }
#endif
        sbus_unmap_sg(dev, sg, nents, (int)direction);
}

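/*
 * Give ownership of a streaming buffer back to the CPU so the kernel
 * can safely read data the device has written.
 */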
static void dma32_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
                                      size_t size,
                                      enum dma_data_direction direction)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type) {
                pci_dma_sync_single_for_cpu(to_pci_dev(dev), dma_handle,
                                            size, (int)direction);
                return;
        }
#endif
        sbus_dma_sync_single_for_cpu(dev, dma_handle, size, (int)direction);
}

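/*
 * Hand a streaming buffer back to the device, making CPU-side writes
 * visible before the device performs DMA.
 */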
static void dma32_sync_single_for_device(struct device *dev,
                                         dma_addr_t dma_handle, size_t size,
                                         enum dma_data_direction direction)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type) {
                pci_dma_sync_single_for_device(to_pci_dev(dev), dma_handle,
                                               size, (int)direction);
                return;
        }
#endif
        sbus_dma_sync_single_for_device(dev, dma_handle, size, (int)direction);
}

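/*
 * Scatterlist sync towards the CPU.  Only the PCI backend implements
 * this; a non-PCI (SBUS) device reaching this path is a bug.
 */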
static void dma32_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
                                  int nelems, enum dma_data_direction direction)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type) {
                pci_dma_sync_sg_for_cpu(to_pci_dev(dev), sg,
                                        nelems, (int)direction);
                return;
        }
#endif
        BUG();
}

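/* Scatterlist sync towards the device; PCI only, as above. */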
static void dma32_sync_sg_for_device(struct device *dev,
                                     struct scatterlist *sg, int nelems,
                                     enum dma_data_direction direction)
{
#ifdef CONFIG_PCI
        if (dev->bus == &pci_bus_type) {
                pci_dma_sync_sg_for_device(to_pci_dev(dev), sg,
                                           nelems, (int)direction);
                return;
        }
#endif
        BUG();
}

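/*
 * The dma_map_ops table used by the generic DMA API on 32-bit sparc.
 * Drivers do not call the dma32_* helpers directly; for example
 * (hypothetical driver code):
 *
 *      void *cpu;
 *      dma_addr_t handle;
 *
 *      cpu = dma_alloc_coherent(dev, size, &handle, GFP_KERNEL);
 *      ...
 *      dma_free_coherent(dev, size, cpu, handle);
 *
 * reaches dma32_alloc_coherent()/dma32_free_coherent() through dma_ops.
 */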
static struct dma_map_ops dma32_dma_ops = {
        .alloc_coherent         = dma32_alloc_coherent,
        .free_coherent          = dma32_free_coherent,
        .map_page               = dma32_map_page,
        .unmap_page             = dma32_unmap_page,
        .map_sg                 = dma32_map_sg,
        .unmap_sg               = dma32_unmap_sg,
        .sync_single_for_cpu    = dma32_sync_single_for_cpu,
        .sync_single_for_device = dma32_sync_single_for_device,
        .sync_sg_for_cpu        = dma32_sync_sg_for_cpu,
        .sync_sg_for_device     = dma32_sync_sg_for_device,
};

struct dma_map_ops *dma_ops = &dma32_dma_ops;
EXPORT_SYMBOL(dma_ops);