2 * Copyright (c) 2010-2011 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 #include <asm/unaligned.h>
20 /* identify firmware images */
21 #define FIRMWARE_AR7010_1_1 "htc_7010.fw"
22 #define FIRMWARE_AR9271 "htc_9271.fw"
24 MODULE_FIRMWARE(FIRMWARE_AR7010_1_1);
25 MODULE_FIRMWARE(FIRMWARE_AR9271);
/*
 * USB vendor/product IDs claimed by this driver.  Entries with no
 * .driver_info default to the AR9271 family; AR9287_USB / AR9280_USB
 * select alternative chip families, and STORAGE_DEVICE marks a device
 * that enumerates as a mass-storage "driver CD" and must be ejected
 * before the real WLAN device appears (see send_eject_command()).
 * NOTE(review): modern kernels declare device-ID tables const — confirm
 * against the tree's convention before changing.
 */
27 static struct usb_device_id ath9k_hif_usb_ids[] = {
28 { USB_DEVICE(0x0cf3, 0x9271) }, /* Atheros */
29 { USB_DEVICE(0x0cf3, 0x1006) }, /* Atheros */
30 { USB_DEVICE(0x0846, 0x9030) }, /* Netgear N150 */
31 { USB_DEVICE(0x07D1, 0x3A10) }, /* Dlink Wireless 150 */
32 { USB_DEVICE(0x13D3, 0x3327) }, /* Azurewave */
33 { USB_DEVICE(0x13D3, 0x3328) }, /* Azurewave */
34 { USB_DEVICE(0x13D3, 0x3346) }, /* IMC Networks */
35 { USB_DEVICE(0x13D3, 0x3348) }, /* Azurewave */
36 { USB_DEVICE(0x13D3, 0x3349) }, /* Azurewave */
37 { USB_DEVICE(0x13D3, 0x3350) }, /* Azurewave */
38 { USB_DEVICE(0x04CA, 0x4605) }, /* Liteon */
39 { USB_DEVICE(0x040D, 0x3801) }, /* VIA */
40 { USB_DEVICE(0x0cf3, 0xb003) }, /* Ubiquiti WifiStation Ext */
42 { USB_DEVICE(0x0cf3, 0x7015),
43 .driver_info = AR9287_USB }, /* Atheros */
44 { USB_DEVICE(0x1668, 0x1200),
45 .driver_info = AR9287_USB }, /* Verizon */
47 { USB_DEVICE(0x0cf3, 0x7010),
48 .driver_info = AR9280_USB }, /* Atheros */
49 { USB_DEVICE(0x0846, 0x9018),
50 .driver_info = AR9280_USB }, /* Netgear WNDA3200 */
51 { USB_DEVICE(0x083A, 0xA704),
52 .driver_info = AR9280_USB }, /* SMC Networks */
53 { USB_DEVICE(0x0411, 0x017f),
54 .driver_info = AR9280_USB }, /* Sony UWA-BR100 */
56 { USB_DEVICE(0x0cf3, 0x20ff),
57 .driver_info = STORAGE_DEVICE },
62 MODULE_DEVICE_TABLE(usb, ath9k_hif_usb_ids);
64 static int __hif_usb_tx(struct hif_device_usb *hif_dev);
/*
 * Completion callback for register-write (REG_OUT pipe) URBs.
 * Recovers the cmd_buf from urb->context, inspects urb->status, and hands
 * the command SKB back to the HTC layer via ath9k_htc_txcompletion_cb().
 */
66 static void hif_usb_regout_cb(struct urb *urb)
68 struct cmd_buf *cmd = (struct cmd_buf *)urb->context;
70 switch (urb->status) {
83 ath9k_htc_txcompletion_cb(cmd->hif_dev->htc_handle,
/*
 * Queue a register-write SKB on the REG_OUT bulk pipe.
 * Allocates a URB and a cmd_buf wrapper (GFP_KERNEL — process context
 * only), anchors the URB on regout_submitted so hif_usb_stop/dealloc can
 * kill it, and unanchors on submission failure.
 */
94 static int hif_usb_send_regout(struct hif_device_usb *hif_dev,
101 urb = usb_alloc_urb(0, GFP_KERNEL);
105 cmd = kzalloc(sizeof(*cmd), GFP_KERNEL);
112 cmd->hif_dev = hif_dev;
114 usb_fill_bulk_urb(urb, hif_dev->udev,
115 usb_sndbulkpipe(hif_dev->udev, USB_REG_OUT_PIPE),
117 hif_usb_regout_cb, cmd);
119 usb_anchor_urb(urb, &hif_dev->regout_submitted);
120 ret = usb_submit_urb(urb, GFP_KERNEL);
122 usb_unanchor_urb(urb);
130 static void hif_usb_mgmt_cb(struct urb *urb)
132 struct cmd_buf *cmd = (struct cmd_buf *)urb->context;
133 struct hif_device_usb *hif_dev = cmd->hif_dev;
136 if (!cmd || !cmd->skb || !cmd->hif_dev)
139 switch (urb->status) {
149 * If the URBs are being flushed, no need to complete
152 spin_lock(&hif_dev->tx.tx_lock);
153 if (hif_dev->tx.flags & HIF_USB_TX_FLUSH) {
154 spin_unlock(&hif_dev->tx.tx_lock);
155 dev_kfree_skb_any(cmd->skb);
159 spin_unlock(&hif_dev->tx.tx_lock);
167 skb_pull(cmd->skb, 4);
168 ath9k_htc_txcompletion_cb(cmd->hif_dev->htc_handle,
/*
 * Send a management/beacon frame directly on the WLAN TX bulk pipe,
 * bypassing the aggregation buffer pool.  Prepends the 4-byte stream-mode
 * header (le16 payload length + le16 ATH_USB_TX_STREAM_MODE_TAG), anchors
 * the URB on mgmt_submitted, and unanchors on submission failure.
 * GFP_ATOMIC throughout — may be called from non-sleepable context.
 */
173 static int hif_usb_send_mgmt(struct hif_device_usb *hif_dev,
181 urb = usb_alloc_urb(0, GFP_ATOMIC);
185 cmd = kzalloc(sizeof(*cmd), GFP_ATOMIC);
192 cmd->hif_dev = hif_dev;
194 hdr = (__le16 *) skb_push(skb, 4);
195 *hdr++ = cpu_to_le16(skb->len - 4);
196 *hdr++ = cpu_to_le16(ATH_USB_TX_STREAM_MODE_TAG);
198 usb_fill_bulk_urb(urb, hif_dev->udev,
199 usb_sndbulkpipe(hif_dev->udev, USB_WLAN_TX_PIPE),
201 hif_usb_mgmt_cb, cmd);
203 usb_anchor_urb(urb, &hif_dev->mgmt_submitted);
204 ret = usb_submit_urb(urb, GFP_ATOMIC);
206 usb_unanchor_urb(urb);
/*
 * Drop every SKB on @list without completing it to HTC.
 * Uses the unlocked __skb_dequeue — caller must own the queue.
 */
214 static inline void ath9k_skb_queue_purge(struct hif_device_usb *hif_dev,
215 struct sk_buff_head *list)
219 while ((skb = __skb_dequeue(list)) != NULL) {
220 dev_kfree_skb_any(skb);
/*
 * Complete every SKB on @queue to the HTC layer, bumping the success or
 * failure TX statistic depending on the (elided) txok argument.
 * Uses the unlocked __skb_dequeue — caller must own the queue.
 */
224 static inline void ath9k_skb_queue_complete(struct hif_device_usb *hif_dev,
225 struct sk_buff_head *queue,
230 while ((skb = __skb_dequeue(queue)) != NULL) {
231 ath9k_htc_txcompletion_cb(hif_dev->htc_handle,
234 TX_STAT_INC(skb_success);
236 TX_STAT_INC(skb_failed);
/*
 * Completion callback for aggregated-TX URBs from the buffer pool.
 * On flush, the queued SKBs are purged; otherwise they are completed to
 * HTC.  The tx_buf is then reset and returned to the free list, and
 * __hif_usb_tx() is kicked (under tx_lock) to drain any pending SKBs,
 * unless TX has been stopped.
 */
240 static void hif_usb_tx_cb(struct urb *urb)
242 struct tx_buf *tx_buf = (struct tx_buf *) urb->context;
243 struct hif_device_usb *hif_dev;
246 if (!tx_buf || !tx_buf->hif_dev)
249 hif_dev = tx_buf->hif_dev;
251 switch (urb->status) {
261 * If the URBs are being flushed, no need to add this
262 * URB to the free list.
264 spin_lock(&hif_dev->tx.tx_lock);
265 if (hif_dev->tx.flags & HIF_USB_TX_FLUSH) {
266 spin_unlock(&hif_dev->tx.tx_lock);
267 ath9k_skb_queue_purge(hif_dev, &tx_buf->skb_queue);
270 spin_unlock(&hif_dev->tx.tx_lock);
278 ath9k_skb_queue_complete(hif_dev, &tx_buf->skb_queue, txok);
280 /* Re-initialize the SKB queue */
281 tx_buf->len = tx_buf->offset = 0;
282 __skb_queue_head_init(&tx_buf->skb_queue);
284 /* Add this TX buffer to the free list */
285 spin_lock(&hif_dev->tx.tx_lock);
286 list_move_tail(&tx_buf->list, &hif_dev->tx.tx_buf);
287 hif_dev->tx.tx_buf_cnt++;
288 if (!(hif_dev->tx.flags & HIF_USB_TX_STOP))
289 __hif_usb_tx(hif_dev); /* Check for pending SKBs */
290 TX_STAT_INC(buf_completed);
291 spin_unlock(&hif_dev->tx.tx_lock);
294 /* TX lock has to be taken */
/*
 * Aggregate up to MAX_TX_AGGR_NUM pending SKBs into one free tx_buf and
 * submit it on the WLAN TX bulk pipe.  Each SKB is prefixed with a 4-byte
 * stream-mode header and padded to a 4-byte boundary (except the last).
 * On submission failure the SKBs are completed as failed and the tx_buf
 * returns to the free list.  Caller holds tx.tx_lock.
 */
295 static int __hif_usb_tx(struct hif_device_usb *hif_dev)
297 struct tx_buf *tx_buf = NULL;
298 struct sk_buff *nskb = NULL;
304 if (hif_dev->tx.tx_skb_cnt == 0)
307 /* Check if a free TX buffer is available */
308 if (list_empty(&hif_dev->tx.tx_buf))
311 tx_buf = list_first_entry(&hif_dev->tx.tx_buf, struct tx_buf, list);
312 list_move_tail(&tx_buf->list, &hif_dev->tx.tx_pending);
313 hif_dev->tx.tx_buf_cnt--;
315 tx_skb_cnt = min_t(u16, hif_dev->tx.tx_skb_cnt, MAX_TX_AGGR_NUM);
317 for (i = 0; i < tx_skb_cnt; i++) {
318 nskb = __skb_dequeue(&hif_dev->tx.tx_skb_queue);
320 /* Should never be NULL */
323 hif_dev->tx.tx_skb_cnt--;
326 buf += tx_buf->offset;
328 *hdr++ = cpu_to_le16(nskb->len);
329 *hdr++ = cpu_to_le16(ATH_USB_TX_STREAM_MODE_TAG);
331 memcpy(buf, nskb->data, nskb->len);
332 tx_buf->len = nskb->len + 4;
/* Round each intermediate fragment up to the next 4-byte boundary;
 * the final fragment instead folds the running offset into len. */
334 if (i < (tx_skb_cnt - 1))
335 tx_buf->offset += (((tx_buf->len - 1) / 4) + 1) * 4;
337 if (i == (tx_skb_cnt - 1))
338 tx_buf->len += tx_buf->offset;
340 __skb_queue_tail(&tx_buf->skb_queue, nskb);
341 TX_STAT_INC(skb_queued);
344 usb_fill_bulk_urb(tx_buf->urb, hif_dev->udev,
345 usb_sndbulkpipe(hif_dev->udev, USB_WLAN_TX_PIPE),
346 tx_buf->buf, tx_buf->len,
347 hif_usb_tx_cb, tx_buf);
349 ret = usb_submit_urb(tx_buf->urb, GFP_ATOMIC);
/* Submission failed: complete SKBs as failed, reset and recycle buffer. */
351 tx_buf->len = tx_buf->offset = 0;
352 ath9k_skb_queue_complete(hif_dev, &tx_buf->skb_queue, false);
353 __skb_queue_head_init(&tx_buf->skb_queue);
354 list_move_tail(&tx_buf->list, &hif_dev->tx.tx_buf);
355 hif_dev->tx.tx_buf_cnt++;
359 TX_STAT_INC(buf_queued);
/*
 * Enqueue a data SKB for aggregated transmission.
 * Rejects the frame when TX is stopped or the queue limit is reached.
 * Mgmt/beacon frames are diverted to hif_usb_send_mgmt(); normal/AMPDU
 * frames are queued, and TX is kicked immediately when all URBs are free
 * and the backlog is tiny (avoids stalling a lone AMPDU).
 */
364 static int hif_usb_send_tx(struct hif_device_usb *hif_dev, struct sk_buff *skb)
366 struct ath9k_htc_tx_ctl *tx_ctl;
370 spin_lock_irqsave(&hif_dev->tx.tx_lock, flags);
372 if (hif_dev->tx.flags & HIF_USB_TX_STOP) {
373 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
377 /* Check if the max queue count has been reached */
378 if (hif_dev->tx.tx_skb_cnt > MAX_TX_BUF_NUM) {
379 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
383 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
385 tx_ctl = HTC_SKB_CB(skb);
387 /* Mgmt/Beacon frames don't use the TX buffer pool */
388 if ((tx_ctl->type == ATH9K_HTC_MGMT) ||
389 (tx_ctl->type == ATH9K_HTC_BEACON)) {
390 ret = hif_usb_send_mgmt(hif_dev, skb);
393 spin_lock_irqsave(&hif_dev->tx.tx_lock, flags);
395 if ((tx_ctl->type == ATH9K_HTC_NORMAL) ||
396 (tx_ctl->type == ATH9K_HTC_AMPDU)) {
397 __skb_queue_tail(&hif_dev->tx.tx_skb_queue, skb);
398 hif_dev->tx.tx_skb_cnt++;
401 /* Check if AMPDUs have to be sent immediately */
402 if ((hif_dev->tx.tx_buf_cnt == MAX_TX_URB_NUM) &&
403 (hif_dev->tx.tx_skb_cnt < 2)) {
404 __hif_usb_tx(hif_dev);
407 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
/*
 * HIF .start callback: mark the device started and clear the TX-stop
 * flag so hif_usb_send_tx() accepts frames again.
 */
412 static void hif_usb_start(void *hif_handle)
414 struct hif_device_usb *hif_dev = (struct hif_device_usb *)hif_handle;
417 hif_dev->flags |= HIF_USB_START;
419 spin_lock_irqsave(&hif_dev->tx.tx_lock, flags);
420 hif_dev->tx.flags &= ~HIF_USB_TX_STOP;
421 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
/*
 * HIF .stop callback: fail-complete all queued SKBs, set TX_STOP so no
 * new frames are accepted, then cancel in-flight data URBs and all
 * anchored management URBs.
 */
424 static void hif_usb_stop(void *hif_handle)
426 struct hif_device_usb *hif_dev = (struct hif_device_usb *)hif_handle;
427 struct tx_buf *tx_buf = NULL, *tx_buf_tmp = NULL;
430 spin_lock_irqsave(&hif_dev->tx.tx_lock, flags);
431 ath9k_skb_queue_complete(hif_dev, &hif_dev->tx.tx_skb_queue, false);
432 hif_dev->tx.tx_skb_cnt = 0;
433 hif_dev->tx.flags |= HIF_USB_TX_STOP;
434 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
436 /* The pending URBs have to be canceled. */
437 list_for_each_entry_safe(tx_buf, tx_buf_tmp,
438 &hif_dev->tx.tx_pending, list) {
439 usb_kill_urb(tx_buf->urb);
442 usb_kill_anchored_urbs(&hif_dev->mgmt_submitted);
/*
 * HIF .send callback: dispatch an outgoing SKB to the WLAN-TX or
 * REG-OUT path based on pipe ID; unknown pipes are logged as errors.
 */
445 static int hif_usb_send(void *hif_handle, u8 pipe_id, struct sk_buff *skb)
447 struct hif_device_usb *hif_dev = (struct hif_device_usb *)hif_handle;
451 case USB_WLAN_TX_PIPE:
452 ret = hif_usb_send_tx(hif_dev, skb);
454 case USB_REG_OUT_PIPE:
455 ret = hif_usb_send_regout(hif_dev, skb);
458 dev_err(&hif_dev->udev->dev,
459 "ath9k_htc: Invalid TX pipe: %d\n", pipe_id);
/*
 * Return whether @skb is an AMPDU frame destined for station index @idx
 * (used by hif_usb_sta_drain() to pick frames to drop).
 */
467 static inline bool check_index(struct sk_buff *skb, u8 idx)
469 struct ath9k_htc_tx_ctl *tx_ctl;
471 tx_ctl = HTC_SKB_CB(skb);
473 if ((tx_ctl->type == ATH9K_HTC_AMPDU) &&
474 (tx_ctl->sta_idx == idx))
/*
 * HIF .sta_drain callback: under tx_lock, unlink every queued AMPDU SKB
 * belonging to station @idx and complete it to HTC as failed.
 */
481 static void hif_usb_sta_drain(void *hif_handle, u8 idx)
482 struct hif_device_usb *hif_dev = (struct hif_device_usb *)hif_handle;
483 struct sk_buff *skb, *tmp;
486 spin_lock_irqsave(&hif_dev->tx.tx_lock, flags);
488 skb_queue_walk_safe(&hif_dev->tx.tx_skb_queue, skb, tmp) {
489 if (check_index(skb, idx)) {
490 __skb_unlink(skb, &hif_dev->tx.tx_skb_queue);
491 ath9k_htc_txcompletion_cb(hif_dev->htc_handle,
493 hif_dev->tx.tx_skb_cnt--;
494 TX_STAT_INC(skb_failed);
498 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
/* HIF operations table registered with the HTC core for the USB transport. */
501 static struct ath9k_htc_hif hif_usb = {
502 .transport = ATH9K_HIF_USB,
503 .name = "ath9k_hif_usb",
505 .control_ul_pipe = USB_REG_OUT_PIPE,
506 .control_dl_pipe = USB_REG_IN_PIPE,
508 .start = hif_usb_start,
509 .stop = hif_usb_stop,
510 .sta_drain = hif_usb_sta_drain,
511 .send = hif_usb_send,
/*
 * De-multiplex a stream-mode RX transfer into individual packets.
 * Each packet in the transfer is framed as:
 *   le16 pkt_len | le16 ATH_USB_RX_STREAM_MODE_TAG | payload | pad to 4B.
 * A packet split across transfers is stashed in hif_dev->remain_skb (with
 * rx_remain_len / rx_transfer_len / rx_pad_len, guarded by rx_lock) and
 * stitched together when the next transfer arrives.  Completed packets
 * are collected in skb_pool and delivered to ath9k_htc_rx_msg().
 */
514 static void ath9k_hif_usb_rx_stream(struct hif_device_usb *hif_dev,
517 struct sk_buff *nskb, *skb_pool[MAX_PKT_NUM_IN_TRANSFER];
518 int index = 0, i = 0, len = skb->len;
519 int rx_remain_len, rx_pkt_len;
523 spin_lock(&hif_dev->rx_lock);
525 rx_remain_len = hif_dev->rx_remain_len;
526 rx_pkt_len = hif_dev->rx_transfer_len;
/* A partial packet from the previous transfer: copy its tail out of
 * this transfer's head and finish the stashed remain_skb. */
528 if (rx_remain_len != 0) {
529 struct sk_buff *remain_skb = hif_dev->remain_skb;
532 ptr = (u8 *) remain_skb->data;
534 index = rx_remain_len;
535 rx_remain_len -= hif_dev->rx_pad_len;
538 memcpy(ptr, skb->data, rx_remain_len);
540 rx_pkt_len += rx_remain_len;
541 hif_dev->rx_remain_len = 0;
542 skb_put(remain_skb, rx_pkt_len);
544 skb_pool[pool_index++] = remain_skb;
547 index = rx_remain_len;
551 spin_unlock(&hif_dev->rx_lock);
553 while (index < len) {
559 ptr = (u8 *) skb->data;
561 pkt_len = get_unaligned_le16(ptr + index);
562 pkt_tag = get_unaligned_le16(ptr + index + 2);
/* A bad tag means the stream is desynchronized — drop. */
564 if (pkt_tag != ATH_USB_RX_STREAM_MODE_TAG) {
565 RX_STAT_INC(skb_dropped);
/* NOTE(review): this yields pad_len == 4 when pkt_len is already
 * 4-byte aligned; the correction for that case is not visible in
 * this view — confirm it exists before relying on this math. */
569 pad_len = 4 - (pkt_len & 0x3);
574 index = index + 4 + pkt_len + pad_len;
/* Packet straddles the transfer boundary: stash the head in a fresh
 * remain_skb and record how much is still to come. */
576 if (index > MAX_RX_BUF_SIZE) {
577 spin_lock(&hif_dev->rx_lock);
578 hif_dev->rx_remain_len = index - MAX_RX_BUF_SIZE;
579 hif_dev->rx_transfer_len =
580 MAX_RX_BUF_SIZE - chk_idx - 4;
581 hif_dev->rx_pad_len = pad_len;
583 nskb = __dev_alloc_skb(pkt_len + 32, GFP_ATOMIC);
585 dev_err(&hif_dev->udev->dev,
586 "ath9k_htc: RX memory allocation error\n");
587 spin_unlock(&hif_dev->rx_lock);
590 skb_reserve(nskb, 32);
591 RX_STAT_INC(skb_allocated);
593 memcpy(nskb->data, &(skb->data[chk_idx+4]),
594 hif_dev->rx_transfer_len);
596 /* Record the buffer pointer */
597 hif_dev->remain_skb = nskb;
598 spin_unlock(&hif_dev->rx_lock);
/* Whole packet present: copy it into its own SKB for delivery. */
600 nskb = __dev_alloc_skb(pkt_len + 32, GFP_ATOMIC);
602 dev_err(&hif_dev->udev->dev,
603 "ath9k_htc: RX memory allocation error\n");
606 skb_reserve(nskb, 32);
607 RX_STAT_INC(skb_allocated);
609 memcpy(nskb->data, &(skb->data[chk_idx+4]), pkt_len);
610 skb_put(nskb, pkt_len);
611 skb_pool[pool_index++] = nskb;
/* Deliver all completed packets to the HTC RX path. */
616 for (i = 0; i < pool_index; i++) {
617 ath9k_htc_rx_msg(hif_dev->htc_handle, skb_pool[i],
618 skb_pool[i]->len, USB_WLAN_RX_PIPE);
619 RX_STAT_INC(skb_completed);
/*
 * Completion callback for WLAN RX bulk URBs.  Non-empty transfers are
 * passed to the stream de-multiplexer; the same SKB is then reset and the
 * URB resubmitted (re-anchored on rx_submitted), unanchoring on failure.
 */
623 static void ath9k_hif_usb_rx_cb(struct urb *urb)
625 struct sk_buff *skb = (struct sk_buff *) urb->context;
626 struct hif_device_usb *hif_dev =
627 usb_get_intfdata(usb_ifnum_to_if(urb->dev, 0));
636 switch (urb->status) {
648 if (likely(urb->actual_length != 0)) {
649 skb_put(skb, urb->actual_length);
650 ath9k_hif_usb_rx_stream(hif_dev, skb);
654 skb_reset_tail_pointer(skb);
657 usb_anchor_urb(urb, &hif_dev->rx_submitted);
658 ret = usb_submit_urb(urb, GFP_ATOMIC);
660 usb_unanchor_urb(urb);
/*
 * Completion callback for REG_IN bulk URBs (firmware command/WMI replies).
 * The received SKB is handed to the HTC layer, then a *new* SKB is
 * allocated for the next transfer (the old one is consumed by HTC), the
 * URB refilled, re-anchored and resubmitted, unanchoring on failure.
 */
669 static void ath9k_hif_usb_reg_in_cb(struct urb *urb)
671 struct sk_buff *skb = (struct sk_buff *) urb->context;
672 struct sk_buff *nskb;
673 struct hif_device_usb *hif_dev =
674 usb_get_intfdata(usb_ifnum_to_if(urb->dev, 0));
683 switch (urb->status) {
692 skb_reset_tail_pointer(skb);
698 if (likely(urb->actual_length != 0)) {
699 skb_put(skb, urb->actual_length);
701 /* Process the command first */
702 ath9k_htc_rx_msg(hif_dev->htc_handle, skb,
703 skb->len, USB_REG_IN_PIPE);
706 nskb = alloc_skb(MAX_REG_IN_BUF_SIZE, GFP_ATOMIC);
708 dev_err(&hif_dev->udev->dev,
709 "ath9k_htc: REG_IN memory allocation failure\n");
714 usb_fill_bulk_urb(urb, hif_dev->udev,
715 usb_rcvbulkpipe(hif_dev->udev,
717 nskb->data, MAX_REG_IN_BUF_SIZE,
718 ath9k_hif_usb_reg_in_cb, nskb);
722 usb_anchor_urb(urb, &hif_dev->reg_in_submitted);
723 ret = usb_submit_urb(urb, GFP_ATOMIC);
725 usb_unanchor_urb(urb);
/*
 * Tear down the TX buffer pool: kill and free every URB on the free
 * list, set HIF_USB_TX_FLUSH (so completion callbacks drop instead of
 * completing), then kill and free every pending URB and all anchored
 * management URBs.
 */
735 static void ath9k_hif_usb_dealloc_tx_urbs(struct hif_device_usb *hif_dev)
737 struct tx_buf *tx_buf = NULL, *tx_buf_tmp = NULL;
740 list_for_each_entry_safe(tx_buf, tx_buf_tmp,
741 &hif_dev->tx.tx_buf, list) {
742 usb_kill_urb(tx_buf->urb);
743 list_del(&tx_buf->list);
744 usb_free_urb(tx_buf->urb);
749 spin_lock_irqsave(&hif_dev->tx.tx_lock, flags);
750 hif_dev->tx.flags |= HIF_USB_TX_FLUSH;
751 spin_unlock_irqrestore(&hif_dev->tx.tx_lock, flags);
753 list_for_each_entry_safe(tx_buf, tx_buf_tmp,
754 &hif_dev->tx.tx_pending, list) {
755 usb_kill_urb(tx_buf->urb);
756 list_del(&tx_buf->list);
757 usb_free_urb(tx_buf->urb);
762 usb_kill_anchored_urbs(&hif_dev->mgmt_submitted);
/*
 * Build the TX buffer pool: initialize the free/pending lists, lock, SKB
 * queue and mgmt anchor, then allocate MAX_TX_URB_NUM tx_bufs, each with
 * a MAX_TX_BUF_SIZE aggregation buffer and a URB, linked onto the free
 * list.  On any allocation failure, previously built entries are released
 * via ath9k_hif_usb_dealloc_tx_urbs().
 */
765 static int ath9k_hif_usb_alloc_tx_urbs(struct hif_device_usb *hif_dev)
767 struct tx_buf *tx_buf;
770 INIT_LIST_HEAD(&hif_dev->tx.tx_buf);
771 INIT_LIST_HEAD(&hif_dev->tx.tx_pending);
772 spin_lock_init(&hif_dev->tx.tx_lock);
773 __skb_queue_head_init(&hif_dev->tx.tx_skb_queue);
774 init_usb_anchor(&hif_dev->mgmt_submitted);
776 for (i = 0; i < MAX_TX_URB_NUM; i++) {
777 tx_buf = kzalloc(sizeof(struct tx_buf), GFP_KERNEL);
781 tx_buf->buf = kzalloc(MAX_TX_BUF_SIZE, GFP_KERNEL);
785 tx_buf->urb = usb_alloc_urb(0, GFP_KERNEL);
789 tx_buf->hif_dev = hif_dev;
790 __skb_queue_head_init(&tx_buf->skb_queue);
792 list_add_tail(&tx_buf->list, &hif_dev->tx.tx_buf);
795 hif_dev->tx.tx_buf_cnt = MAX_TX_URB_NUM;
803 ath9k_hif_usb_dealloc_tx_urbs(hif_dev);
/* Cancel all in-flight RX URBs; the anchor owns the final reference,
 * so killing also frees them (see the ref-drop in the alloc path). */
807 static void ath9k_hif_usb_dealloc_rx_urbs(struct hif_device_usb *hif_dev)
809 usb_kill_anchored_urbs(&hif_dev->rx_submitted);
/*
 * Allocate and submit MAX_RX_URB_NUM bulk RX URBs, each backed by a
 * MAX_RX_BUF_SIZE SKB carried in urb->context.  URBs are anchored on
 * rx_submitted, and the local reference is dropped after submission so
 * that killing the anchor frees them.  Partial failure unwinds via
 * ath9k_hif_usb_dealloc_rx_urbs().
 */
812 static int ath9k_hif_usb_alloc_rx_urbs(struct hif_device_usb *hif_dev)
814 struct urb *urb = NULL;
815 struct sk_buff *skb = NULL;
818 init_usb_anchor(&hif_dev->rx_submitted);
819 spin_lock_init(&hif_dev->rx_lock);
821 for (i = 0; i < MAX_RX_URB_NUM; i++) {
824 urb = usb_alloc_urb(0, GFP_KERNEL);
830 /* Allocate buffer */
831 skb = alloc_skb(MAX_RX_BUF_SIZE, GFP_KERNEL);
837 usb_fill_bulk_urb(urb, hif_dev->udev,
838 usb_rcvbulkpipe(hif_dev->udev,
840 skb->data, MAX_RX_BUF_SIZE,
841 ath9k_hif_usb_rx_cb, skb);
844 usb_anchor_urb(urb, &hif_dev->rx_submitted);
847 ret = usb_submit_urb(urb, GFP_KERNEL);
849 usb_unanchor_urb(urb);
854 * Drop reference count.
855 * This ensures that the URB is freed when killing them.
867 ath9k_hif_usb_dealloc_rx_urbs(hif_dev);
/* Cancel all in-flight REG_IN URBs; the anchor owns the final reference,
 * so killing also frees them (see the ref-drop in the alloc path). */
871 static void ath9k_hif_usb_dealloc_reg_in_urbs(struct hif_device_usb *hif_dev)
873 usb_kill_anchored_urbs(&hif_dev->reg_in_submitted);
/*
 * Allocate and submit MAX_REG_IN_URB_NUM bulk URBs on the REG_IN pipe,
 * each backed by a MAX_REG_IN_BUF_SIZE SKB in urb->context.  Same
 * anchor/ref-drop lifetime scheme as the RX path; partial failure
 * unwinds via ath9k_hif_usb_dealloc_reg_in_urbs().
 */
876 static int ath9k_hif_usb_alloc_reg_in_urbs(struct hif_device_usb *hif_dev)
878 struct urb *urb = NULL;
879 struct sk_buff *skb = NULL;
882 init_usb_anchor(&hif_dev->reg_in_submitted);
884 for (i = 0; i < MAX_REG_IN_URB_NUM; i++) {
887 urb = usb_alloc_urb(0, GFP_KERNEL);
893 /* Allocate buffer */
894 skb = alloc_skb(MAX_REG_IN_BUF_SIZE, GFP_KERNEL);
900 usb_fill_bulk_urb(urb, hif_dev->udev,
901 usb_rcvbulkpipe(hif_dev->udev,
903 skb->data, MAX_REG_IN_BUF_SIZE,
904 ath9k_hif_usb_reg_in_cb, skb);
907 usb_anchor_urb(urb, &hif_dev->reg_in_submitted);
910 ret = usb_submit_urb(urb, GFP_KERNEL);
912 usb_unanchor_urb(urb);
917 * Drop reference count.
918 * This ensures that the URB is freed when killing them.
930 ath9k_hif_usb_dealloc_reg_in_urbs(hif_dev);
/*
 * Set up all URB machinery in order: regout anchor, TX pool, RX URBs,
 * REG_IN URBs.  Failures unwind previously allocated stages in reverse.
 */
934 static int ath9k_hif_usb_alloc_urbs(struct hif_device_usb *hif_dev)
937 init_usb_anchor(&hif_dev->regout_submitted);
940 if (ath9k_hif_usb_alloc_tx_urbs(hif_dev) < 0)
944 if (ath9k_hif_usb_alloc_rx_urbs(hif_dev) < 0)
948 if (ath9k_hif_usb_alloc_reg_in_urbs(hif_dev) < 0)
953 ath9k_hif_usb_dealloc_rx_urbs(hif_dev);
955 ath9k_hif_usb_dealloc_tx_urbs(hif_dev);
/* Tear down all URB machinery: regout anchor first, then REG_IN, TX, RX. */
960 static void ath9k_hif_usb_dealloc_urbs(struct hif_device_usb *hif_dev)
962 usb_kill_anchored_urbs(&hif_dev->regout_submitted);
963 ath9k_hif_usb_dealloc_reg_in_urbs(hif_dev);
964 ath9k_hif_usb_dealloc_tx_urbs(hif_dev);
965 ath9k_hif_usb_dealloc_rx_urbs(hif_dev);
/*
 * Push the firmware image to the target in 4 KiB control transfers
 * (FIRMWARE_DOWNLOAD, address advanced per chunk), then issue
 * FIRMWARE_DOWNLOAD_COMP with the family-specific text offset to start
 * execution.  Uses a bounce buffer since usb_control_msg() needs
 * DMA-able memory.
 * NOTE(review): the timeout argument is in milliseconds, but HZ is
 * passed — correct only when CONFIG_HZ=1000; confirm and consider a
 * literal ms value.
 */
968 static int ath9k_hif_usb_download_fw(struct hif_device_usb *hif_dev,
972 const void *data = hif_dev->firmware->data;
973 size_t len = hif_dev->firmware->size;
974 u32 addr = AR9271_FIRMWARE;
975 u8 *buf = kzalloc(4096, GFP_KERNEL);
982 transfer = min_t(int, len, 4096);
983 memcpy(buf, data, transfer);
985 err = usb_control_msg(hif_dev->udev,
986 usb_sndctrlpipe(hif_dev->udev, 0),
987 FIRMWARE_DOWNLOAD, 0x40 | USB_DIR_OUT,
988 addr >> 8, 0, buf, transfer, HZ);
1000 if (IS_AR7010_DEVICE(drv_info))
1001 firm_offset = AR7010_FIRMWARE_TEXT;
1003 firm_offset = AR9271_FIRMWARE_TEXT;
1006 * Issue FW download complete command to firmware.
1008 err = usb_control_msg(hif_dev->udev, usb_sndctrlpipe(hif_dev->udev, 0),
1009 FIRMWARE_DOWNLOAD_COMP,
1011 firm_offset >> 8, 0, NULL, 0, HZ);
1015 dev_info(&hif_dev->udev->dev, "ath9k_htc: Transferred FW: %s, size: %ld\n",
1016 hif_dev->fw_name, (unsigned long) hif_dev->firmware->size);
/*
 * Device bring-up: request the firmware blob, download it, patch any
 * interrupt endpoints in the cached descriptors to bulk (the firmware
 * re-types EP4 after download; this keeps host state consistent and
 * lowers CPU load during scans), then allocate all URBs.  On failure the
 * firmware reference is released and cleared.
 */
1021 static int ath9k_hif_usb_dev_init(struct hif_device_usb *hif_dev, u32 drv_info)
1024 struct usb_host_interface *alt = &hif_dev->interface->altsetting[0];
1025 struct usb_endpoint_descriptor *endp;
1027 /* Request firmware */
1028 ret = request_firmware(&hif_dev->firmware, hif_dev->fw_name,
1029 &hif_dev->udev->dev);
1031 dev_err(&hif_dev->udev->dev,
1032 "ath9k_htc: Firmware - %s not found\n", hif_dev->fw_name);
1036 /* Download firmware */
1037 ret = ath9k_hif_usb_download_fw(hif_dev, drv_info);
1039 dev_err(&hif_dev->udev->dev,
1040 "ath9k_htc: Firmware - %s download failed\n",
1042 goto err_fw_download;
1045 /* On downloading the firmware to the target, the USB descriptor of EP4
1046 * is 'patched' to change the type of the endpoint to Bulk. This will
1047 * bring down CPU usage during the scan period.
1049 for (idx = 0; idx < alt->desc.bNumEndpoints; idx++) {
1050 endp = &alt->endpoint[idx].desc;
1051 if ((endp->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK)
1052 == USB_ENDPOINT_XFER_INT) {
1053 endp->bmAttributes &= ~USB_ENDPOINT_XFERTYPE_MASK;
1054 endp->bmAttributes |= USB_ENDPOINT_XFER_BULK;
1055 endp->bInterval = 0;
1060 ret = ath9k_hif_usb_alloc_urbs(hif_dev);
1062 dev_err(&hif_dev->udev->dev,
1063 "ath9k_htc: Unable to allocate URBs\n");
1064 goto err_fw_download;
1070 release_firmware(hif_dev->firmware);
1072 hif_dev->firmware = NULL;
/*
 * Device teardown: free all URBs and drop the firmware reference.
 * (The NULL guard is redundant — release_firmware(NULL) is a no-op —
 * but harmless; kept byte-identical here.)
 */
1076 static void ath9k_hif_usb_dev_deinit(struct hif_device_usb *hif_dev)
1078 ath9k_hif_usb_dealloc_urbs(hif_dev);
1079 if (hif_dev->firmware)
1080 release_firmware(hif_dev->firmware);
1084 * An exact copy of the function from zd1211rw.
/*
 * Eject the fake mass-storage "driver CD" some dongles present on first
 * plug-in: find a bulk-out endpoint and send a SCSI START STOP UNIT
 * command (eject) wrapped in a USB mass-storage Command Block Wrapper.
 * The device then re-enumerates as the real WLAN device and this driver
 * probes it again.
 */
1086 static int send_eject_command(struct usb_interface *interface)
1088 struct usb_device *udev = interface_to_usbdev(interface);
1089 struct usb_host_interface *iface_desc = &interface->altsetting[0];
1090 struct usb_endpoint_descriptor *endpoint;
1095 /* Find bulk out endpoint */
1096 for (r = 1; r >= 0; r--) {
1097 endpoint = &iface_desc->endpoint[r].desc;
1098 if (usb_endpoint_dir_out(endpoint) &&
1099 usb_endpoint_xfer_bulk(endpoint)) {
1100 bulk_out_ep = endpoint->bEndpointAddress;
1106 "ath9k_htc: Could not find bulk out endpoint\n");
1110 cmd = kzalloc(31, GFP_KERNEL);
1114 /* USB bulk command block */
1115 cmd[0] = 0x55; /* bulk command signature */
1116 cmd[1] = 0x53; /* bulk command signature */
1117 cmd[2] = 0x42; /* bulk command signature */
1118 cmd[3] = 0x43; /* bulk command signature */
1119 cmd[14] = 6; /* command length */
1121 cmd[15] = 0x1b; /* SCSI command: START STOP UNIT */
1122 cmd[19] = 0x2; /* eject disc */
1124 dev_info(&udev->dev, "Ejecting storage device...\n");
1125 r = usb_bulk_msg(udev, usb_sndbulkpipe(udev, bulk_out_ep),
1126 cmd, 31, NULL, 2000);
1131 /* At this point, the device disconnects and reconnects with the real
1134 usb_set_intfdata(interface, NULL);
/*
 * USB probe: eject storage-mode devices, otherwise allocate the
 * hif_device_usb, enable reset_resume (device loses firmware across
 * suspend), allocate the HTC handle, pick the firmware image by chip
 * family, initialize the device, and hand off to the HTC core.  Error
 * paths unwind in reverse order of setup.
 */
1138 static int ath9k_hif_usb_probe(struct usb_interface *interface,
1139 const struct usb_device_id *id)
1141 struct usb_device *udev = interface_to_usbdev(interface);
1142 struct hif_device_usb *hif_dev;
1145 if (id->driver_info == STORAGE_DEVICE)
1146 return send_eject_command(interface);
1148 hif_dev = kzalloc(sizeof(struct hif_device_usb), GFP_KERNEL);
1155 hif_dev->udev = udev;
1156 hif_dev->interface = interface;
1157 hif_dev->device_id = id->idProduct;
1159 udev->reset_resume = 1;
1161 usb_set_intfdata(interface, hif_dev);
1163 hif_dev->htc_handle = ath9k_htc_hw_alloc(hif_dev, &hif_usb,
1164 &hif_dev->udev->dev);
1165 if (hif_dev->htc_handle == NULL) {
1167 goto err_htc_hw_alloc;
1170 /* Find out which firmware to load */
1172 if (IS_AR7010_DEVICE(id->driver_info))
1173 hif_dev->fw_name = FIRMWARE_AR7010_1_1;
1175 hif_dev->fw_name = FIRMWARE_AR9271;
1177 ret = ath9k_hif_usb_dev_init(hif_dev, id->driver_info);
1180 goto err_hif_init_usb;
1183 ret = ath9k_htc_hw_init(hif_dev->htc_handle,
1184 &interface->dev, hif_dev->device_id,
1185 hif_dev->udev->product, id->driver_info);
1188 goto err_htc_hw_init;
1191 dev_info(&hif_dev->udev->dev, "ath9k_htc: USB layer initialized\n");
1196 ath9k_hif_usb_dev_deinit(hif_dev);
1198 ath9k_htc_hw_free(hif_dev->htc_handle);
1200 usb_set_intfdata(interface, NULL);
/*
 * Ask a still-attached device to reboot its firmware by writing the
 * magic 0xffffffff word to the REG_OUT pipe.  Uses a kmemdup'd buffer
 * because usb_bulk_msg() needs DMA-able memory.
 */
1207 static void ath9k_hif_usb_reboot(struct usb_device *udev)
1209 u32 reboot_cmd = 0xffffffff;
1213 buf = kmemdup(&reboot_cmd, 4, GFP_KERNEL);
1217 ret = usb_bulk_msg(udev, usb_sndbulkpipe(udev, USB_REG_OUT_PIPE),
1220 dev_err(&udev->dev, "ath9k_htc: USB reboot failed\n");
/*
 * USB disconnect: tear down HTC and the device state.  `unplugged`
 * distinguishes a physical unplug (device gone — skip USB traffic) from
 * a driver unbind, where the firmware is rebooted so the device comes
 * back in a clean pre-download state.
 */
1225 static void ath9k_hif_usb_disconnect(struct usb_interface *interface)
1227 struct usb_device *udev = interface_to_usbdev(interface);
1228 struct hif_device_usb *hif_dev = usb_get_intfdata(interface);
1229 bool unplugged = (udev->state == USB_STATE_NOTATTACHED) ? true : false;
1234 ath9k_htc_hw_deinit(hif_dev->htc_handle, unplugged);
1235 ath9k_htc_hw_free(hif_dev->htc_handle);
1236 ath9k_hif_usb_dev_deinit(hif_dev);
1237 usb_set_intfdata(interface, NULL);
1239 if (!unplugged && (hif_dev->flags & HIF_USB_START))
1240 ath9k_hif_usb_reboot(udev);
1243 dev_info(&udev->dev, "ath9k_htc: USB layer deinitialized\n");
/*
 * PM suspend: if the interface was never started, put the target into
 * FULLSLEEP explicitly (nothing else will); then release all URBs.
 */
1248 static int ath9k_hif_usb_suspend(struct usb_interface *interface,
1249 pm_message_t message)
1251 struct hif_device_usb *hif_dev = usb_get_intfdata(interface);
1254 * The device has to be set to FULLSLEEP mode in case no
1257 if (!(hif_dev->flags & HIF_USB_START))
1258 ath9k_htc_suspend(hif_dev->htc_handle);
1260 ath9k_hif_usb_dealloc_urbs(hif_dev);
/*
 * PM resume (also used for reset_resume): reallocate URBs, re-download
 * the firmware (the target loses it across suspend) using the cached
 * blob, then resume HTC.  Any failure deallocates the URBs again.
 */
1265 static int ath9k_hif_usb_resume(struct usb_interface *interface)
1267 struct hif_device_usb *hif_dev = usb_get_intfdata(interface);
1268 struct htc_target *htc_handle = hif_dev->htc_handle;
1271 ret = ath9k_hif_usb_alloc_urbs(hif_dev);
1275 if (hif_dev->firmware) {
1276 ret = ath9k_hif_usb_download_fw(hif_dev,
1277 htc_handle->drv_priv->ah->hw_version.usbdev);
1281 ath9k_hif_usb_dealloc_urbs(hif_dev);
1287 ret = ath9k_htc_resume(htc_handle);
1295 ath9k_hif_usb_dealloc_urbs(hif_dev);
/* USB driver registration: probe/disconnect plus PM hooks; reset_resume
 * reuses the normal resume path since it re-downloads firmware anyway. */
1301 static struct usb_driver ath9k_hif_usb_driver = {
1302 .name = KBUILD_MODNAME,
1303 .probe = ath9k_hif_usb_probe,
1304 .disconnect = ath9k_hif_usb_disconnect,
1306 .suspend = ath9k_hif_usb_suspend,
1307 .resume = ath9k_hif_usb_resume,
1308 .reset_resume = ath9k_hif_usb_resume,
1310 .id_table = ath9k_hif_usb_ids,
/* Module-level entry point: register the USB driver with the core. */
1314 int ath9k_hif_usb_init(void)
1316 return usb_register(&ath9k_hif_usb_driver);
/* Module-level exit point: unregister the USB driver. */
1319 void ath9k_hif_usb_exit(void)
1321 usb_deregister(&ath9k_hif_usb_driver);