			void *data,
			int status)
{
-	struct mtip_cmd *command;
+	struct mtip_cmd *cmd;
	struct driver_data *dd = data;
-	int cb_status = status ? -EIO : 0;
+	int unaligned, cb_status = status ? -EIO : 0;
+	void (*func)(void *, int);

	if (unlikely(!dd) || unlikely(!port))
		return;

-	command = &port->commands[tag];
+	cmd = &port->commands[tag];

	if (unlikely(status == PORT_IRQ_TF_ERR)) {
		dev_warn(&port->dd->pdev->dev,
			"Command tag %d failed due to TFE\n", tag);
	}

-	/* Unmap the DMA scatter list entries */
-	dma_unmap_sg(&dd->pdev->dev,
-		command->sg,
-		command->scatter_ents,
-		command->direction);
+	/* Clear the active flag */
+	atomic_set(&port->commands[tag].active, 0);

	/* Upper layer callback */
-	if (likely(command->async_callback))
-		command->async_callback(command->async_data, cb_status);
+	func = cmd->async_callback;
+	if (likely(func && cmpxchg(&cmd->async_callback, func, 0) == func)) {

-		command->async_callback = NULL;
-		command->comp_func = NULL;
+		/* Unmap the DMA scatter list entries */
+		dma_unmap_sg(&dd->pdev->dev,
+			cmd->sg,
+			cmd->scatter_ents,
+			cmd->direction);

-	/* Clear the allocated and active bits for the command */
-	atomic_set(&port->commands[tag].active, 0);
-	release_slot(port, tag);
+		func(cmd->async_data, cb_status);
+		unaligned = cmd->unaligned;

-	up(&port->cmd_slot);
+		/* Clear the allocated bit for the command */
+		release_slot(port, tag);
+
+		if (unlikely(unaligned))
+			up(&port->cmd_slot_unal);
+		else
+			up(&port->cmd_slot);
+	}
}
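
Both hunks apply the same lock-free hand-off: read the callback pointer once, use cmpxchg() to swap it to NULL, and let only the winner of that exchange unmap the scatter list, invoke the callback, and release the slot, so the interrupt-driven completion and the timeout handler cannot both retire one command. Below is a minimal userspace sketch of that claim-once pattern; C11 atomics stand in for the kernel's cmpxchg(), and the struct and function names are illustrative, not taken from the driver.

#include <stdatomic.h>
#include <stdio.h>

typedef void (*done_fn)(void *data, int status);

struct fake_cmd {				/* illustrative stand-in for struct mtip_cmd */
	_Atomic(done_fn) async_callback;	/* cleared exactly once via compare-exchange */
	void *async_data;
};

/* Both the normal completion path and the timeout path call this;
 * only the caller that wins the compare-exchange runs the callback. */
static int complete_once(struct fake_cmd *cmd, int status)
{
	done_fn func = atomic_load(&cmd->async_callback);

	if (func && atomic_compare_exchange_strong(&cmd->async_callback,
						   &func, (done_fn)0)) {
		func(cmd->async_data, status);	/* unmap/release would go here */
		return 1;			/* this caller completed the command */
	}
	return 0;				/* someone else already completed it */
}

static void bio_done(void *data, int status)
{
	printf("completed %s with status %d\n", (const char *)data, status);
}

int main(void)
{
	struct fake_cmd cmd = { bio_done, "tag 7" };

	complete_once(&cmd, 0);		/* normal completion wins */
	complete_once(&cmd, -5);	/* late timeout path is a no-op */
	return 0;
}
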
/*
{
	struct mtip_port *port = (struct mtip_port *) data;
	struct host_to_dev_fis *fis;
-	struct mtip_cmd *command;
-	int tag, cmdto_cnt = 0;
+	struct mtip_cmd *cmd;
+	int unaligned, tag, cmdto_cnt = 0;
	unsigned int bit, group;
	unsigned int num_command_slots;
	unsigned long to, tagaccum[SLOTBITS_IN_LONGS];
+	void (*func)(void *, int);

	if (unlikely(!port))
		return;

			group = tag >> 5;
			bit = tag & 0x1F;

-			command = &port->commands[tag];
-			fis = (struct host_to_dev_fis *) command->command;
+			cmd = &port->commands[tag];
+			fis = (struct host_to_dev_fis *) cmd->command;

			set_bit(tag, tagaccum);
			cmdto_cnt++;
			 */
			writel(1 << bit, port->completed[group]);

-			/* Unmap the DMA scatter list entries */
-			dma_unmap_sg(&port->dd->pdev->dev,
-				command->sg,
-				command->scatter_ents,
-				command->direction);
+			/* Clear the active flag for the command */
+			atomic_set(&port->commands[tag].active, 0);

-			/* Call the async completion callback. */
-			if (likely(command->async_callback))
-				command->async_callback(command->async_data,
-					-EIO);
-			command->async_callback = NULL;
-			command->comp_func = NULL;
+			func = cmd->async_callback;
+			if (func &&
+			    cmpxchg(&cmd->async_callback, func, 0) == func) {

-			/*
-			 * Clear the allocated bit and active tag for the
-			 * command.
-			 */
-			atomic_set(&port->commands[tag].active, 0);
-			release_slot(port, tag);
+				/* Unmap the DMA scatter list entries */
+				dma_unmap_sg(&port->dd->pdev->dev,
+					cmd->sg,
+					cmd->scatter_ents,
+					cmd->direction);

-			up(&port->cmd_slot);
+				func(cmd->async_data, -EIO);
+				unaligned = cmd->unaligned;
+
+				/* Clear the allocated bit for the command. */
+				release_slot(port, tag);
+
+				if (unaligned)
+					up(&port->cmd_slot_unal);
+				else
+					up(&port->cmd_slot);
+			}
		}
	}
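
Both completion paths also snapshot cmd->unaligned before the slot is recycled and then up() the matching semaphore: the diff adds a separate cmd_slot_unal counter that throttles unaligned commands, so the ticket has to be returned to the pool it was taken from. The sketch below is a rough userspace illustration of that pairing, assuming POSIX semaphores; the helper names and queue depths are made up for the example and are not the driver's own.

#include <semaphore.h>
#include <stdio.h>

static sem_t cmd_slot;		/* budget for ordinary commands */
static sem_t cmd_slot_unal;	/* separate, smaller budget for unaligned commands */

struct fake_cmd {
	int unaligned;		/* remembers which pool the slot came from */
};

/* Take a ticket from the pool that matches the request type. */
static void acquire_slot(struct fake_cmd *cmd, int unaligned)
{
	cmd->unaligned = unaligned;
	sem_wait(unaligned ? &cmd_slot_unal : &cmd_slot);
}

/* Return the ticket to the same pool; snapshot the flag first,
 * as the completion paths do, since the slot may be reused. */
static void release_slot_ticket(struct fake_cmd *cmd)
{
	int unaligned = cmd->unaligned;

	if (unaligned)
		sem_post(&cmd_slot_unal);
	else
		sem_post(&cmd_slot);
}

int main(void)
{
	struct fake_cmd cmd;

	sem_init(&cmd_slot, 0, 31);	/* example depths only */
	sem_init(&cmd_slot_unal, 0, 2);

	acquire_slot(&cmd, 1);
	release_slot_ticket(&cmd);
	puts("unaligned ticket returned to cmd_slot_unal");
	return 0;
}
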