/* pointer to parallel array of pointers to packets */
struct sk_buff **txp;
/* Aligned physical address of descriptor ring */
- unsigned long txdpa;
+ dma_addr_t txdpa;
/* Original physical address of descriptor ring */
- unsigned long txdpaorig;
+ dma_addr_t txdpaorig;
u16 txdalign; /* #bytes added to alloc'd mem to align txd */
u32 txdalloc; /* #bytes allocated for the ring */
u32 xmtptrbase; /* Base for ptr reg when using unaligned descriptors */
/* pointer to parallel array of pointers to packets */
struct sk_buff **rxp;
/* Aligned physical address of descriptor ring */
- unsigned long rxdpa;
+ dma_addr_t rxdpa;
/* Original physical address of descriptor ring */
- unsigned long rxdpaorig;
+ dma_addr_t rxdpaorig;
u16 rxdalign; /* #bytes added to alloc'd mem to align rxd */
u32 rxdalloc; /* #bytes allocated for the ring */
u32 rcvptrbase; /* Base for ptr reg when using unaligned descriptors */
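/*
 * Editorial note on the type change in this patch: dma_addr_t is the
 * kernel's type for bus addresses and can be wider than unsigned long
 * (64 bits on a 32-bit kernel when CONFIG_ARCH_DMA_ADDR_T_64BIT is
 * set), so keeping ring addresses in unsigned long could truncate the
 * upper half of the address.
 */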
static bool _dma_descriptor_align(struct dma_info *di);
static bool _dma_alloc(struct dma_info *di, uint direction);
static void _dma_ddtable_init(struct dma_info *di, uint direction,
- unsigned long pa);
+ dma_addr_t pa);
static void _dma_rxenable(struct dma_info *di);
static struct sk_buff *_dma_getnextrxp(struct dma_info *di, bool forceall);
static uint _dma_ctrlflags(struct dma_info *di, uint mask, uint flags);
static u8 dma_align_sizetobits(uint size);
static void *dma_ringalloc(struct dma_info *di, u32 boundary, uint size,
u16 *alignbits, uint *alloced,
- unsigned long *descpa);
+ dma_addr_t *descpa);
/* Prototypes for 64-bit routines */
static bool dma64_alloc(struct dma_info *di, uint direction);
static inline void
dma64_dd_upd(struct dma_info *di, struct dma64desc *ddring,
- unsigned long pa, uint outidx, u32 *flags, u32 bufcount)
+ dma_addr_t pa, uint outidx, u32 *flags, u32 bufcount)
{
u32 ctrl2 = bufcount & D64_CTRL2_BC_MASK;
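/*
 * Editorial sketch, condensed from this function's non-extension path
 * (names as in this driver; treat as illustrative): the bus address is
 * programmed into the descriptor in two halves, which is why pa needs
 * the full width of dma_addr_t rather than unsigned long.
 */
ddring[outidx].addrlow = cpu_to_le32(pa + di->dataoffsetlow);
ddring[outidx].addrhigh = cpu_to_le32(di->dataoffsethigh);
ddring[outidx].ctrl1 = cpu_to_le32(*flags);
ddring[outidx].ctrl2 = cpu_to_le32(ctrl2);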
}
void *dma_alloc_consistent(struct pci_dev *pdev, uint size, u16 align_bits,
- uint *alloced, unsigned long *pap)
+ uint *alloced, dma_addr_t *pap)
{
- void *rc;
- dma_addr_t dma_addr;
-
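/*
 * pci_alloc_consistent() returns page-aligned memory, so any alignment
 * that evenly divides PAGE_SIZE is already satisfied; otherwise
 * over-allocate by 'align' so the caller can round the returned
 * address up within the buffer.
 */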
if (align_bits) {
u16 align = (1 << align_bits);
if (!IS_ALIGNED(PAGE_SIZE, align))
size += align;
*alloced = size;
}
- rc = pci_alloc_consistent(pdev, size, &dma_addr);
- *pap = dma_addr;
- return rc;
+ return pci_alloc_consistent(pdev, size, pap);
}
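/*
 * Editorial sketch (not part of the patch; field names as used in this
 * driver, treat as illustrative): dma64_alloc() consumes the
 * over-allocation above by rounding the virtual address up and applying
 * the same skip to the dma_addr_t, keeping the CPU and bus views of the
 * ring in sync:
 *
 *	align = 1 << align_bits;
 *	di->txd64 = (struct dma64desc *)roundup((unsigned long)va, align);
 *	di->txdalign = (uint)((s8 *)di->txd64 - (s8 *)va);
 *	di->txdpa = di->txdpaorig + di->txdalign;
 */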
/* !! may be called with core in reset */
/* initialize descriptor table base address */
static void
-_dma_ddtable_init(struct dma_info *di, uint direction, unsigned long pa)
+_dma_ddtable_init(struct dma_info *di, uint direction, dma_addr_t pa)
{
if (!di->aligndesc_4k) {
if (direction == DMA_TX)
u32 flags = 0;
uint n;
uint i;
- unsigned long pa;
+ dma_addr_t pa;
uint extra_offset = 0;
bool ring_empty;
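/*
 * Editorial sketch (illustrative; 'p' stands for the sk_buff being
 * posted): in the rx fill path, pa receives its value straight from
 * pci_map_single(), whose return type is dma_addr_t, so the local
 * must match.
 */
pa = pci_map_single(di->pbus, p->data, di->rxbufsize,
		    PCI_DMA_FROMDEVICE);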
*/
static void *dma_ringalloc(struct dma_info *di, u32 boundary, uint size,
u16 *alignbits, uint *alloced,
- unsigned long *descpa)
+ dma_addr_t *descpa)
{
void *va;
u32 desc_strtaddr;
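/*
 * Editorial sketch, condensed from the rest of this function (names as
 * in this driver; treat as illustrative): allocate, then verify the
 * aligned ring does not cross 'boundary'; if it would, retry with the
 * alignment widened to the ring size so a crossing is impossible.
 */
va = dma_alloc_consistent(di->pbus, size, *alignbits, alloced, descpa);
if (va == NULL)
	return NULL;
desc_strtaddr = (u32)roundup((unsigned long)va, 1 << *alignbits);
if (((desc_strtaddr + size - 1) & boundary) !=
    (desc_strtaddr & boundary)) {
	*alignbits = dma_align_sizetobits(size);
	pci_free_consistent(di->pbus, size, va, *descpa);
	va = dma_alloc_consistent(di->pbus, size, *alignbits,
				  alloced, descpa);
}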
uint len;
u16 txout;
u32 flags = 0;
- unsigned long pa;
+ dma_addr_t pa;
DMA_TRACE(("%s: dma_txfast\n", di->name));
goto bogus;
for (i = start; i != end && !txp; i = NEXTTXD(i)) {
- unsigned long pa;
+ dma_addr_t pa;
uint size;
pa = le32_to_cpu(di->txd64[i].addrlow) - di->dataoffsetlow;
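/*
 * Editorial sketch (illustrative): the byte count is read back the
 * same way, and the buffer is unmapped with the recovered bus address.
 */
size = le32_to_cpu(di->txd64[i].ctrl2) & D64_CTRL2_BC_MASK;
pci_unmap_single(di->pbus, pa, size, PCI_DMA_TODEVICE);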
{
uint i, curr;
struct sk_buff *rxp;
- unsigned long pa;
+ dma_addr_t pa;
i = di->rxin;
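/*
 * Editorial sketch (illustrative): the rx reclaim path mirrors the tx
 * one above, reading the bus address back out of the descriptor, so pa
 * must be a dma_addr_t to hold it without truncation.
 */
pa = le32_to_cpu(di->rxd64[i].addrlow) - di->dataoffsetlow;
pci_unmap_single(di->pbus, pa, di->rxbufsize, PCI_DMA_FROMDEVICE);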