#include <adf_os_types.h>
#include <adf_os_dma.h>
#include <adf_os_timer.h>
#include <adf_os_lock.h>
#include <adf_os_mem.h>
#include <adf_os_util.h>
#include <adf_os_stdtypes.h>
#include <adf_os_defer.h>
#include <adf_os_atomic.h>
#include <adf_net_wcmd.h>

#include "if_ethersubr.h"

#ifdef USE_HEADERLEN_RESV
#endif

#include <ieee80211_var.h>

#include "if_athrate.h"
#include "if_athvar.h"
#include "if_ath_pci.h"
#define ath_tgt_free_skb	adf_nbuf_free

#define OFDM_PLCP_BITS		22
#define HT_RC_2_MCS(_rc)	((_rc) & 0x0f)
#define HT_RC_2_STREAMS(_rc)	((((_rc) & 0x78) >> 3) + 1)

/* 802.11n preamble field durations in us (values as in ath9k) */
#define L_STF	8
#define L_LTF	8
#define L_SIG	4
#define HT_SIG	8
#define HT_STF	4
#define HT_LTF(_ns)		(4 * (_ns))

#define SYMBOL_TIME(_ns)	((_ns) << 2)		// ns * 4 us
#define SYMBOL_TIME_HALFGI(_ns)	(((_ns) * 18 + 4) / 5)	// ns * 3.6 us
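
/*
 * Worked example (values for illustration only): a 20-symbol PSDU takes
 * SYMBOL_TIME(20) = 80 us with the regular 800 ns GI, and
 * SYMBOL_TIME_HALFGI(20) = (20 * 18 + 4) / 5 = 72 us with the 400 ns
 * short GI, i.e. 3.6 us per symbol with rounding handled by the +4 bias.
 */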
static a_uint16_t bits_per_symbol[][2] = {
	/* 20 MHz, 40 MHz */
	{  26,   54 },	//  0: BPSK
	{  52,  108 },	//  1: QPSK 1/2
	{  78,  162 },	//  2: QPSK 3/4
	{ 104,  216 },	//  3: 16-QAM 1/2
	{ 156,  324 },	//  4: 16-QAM 3/4
	{ 208,  432 },	//  5: 64-QAM 2/3
	{ 234,  486 },	//  6: 64-QAM 3/4
	{ 260,  540 },	//  7: 64-QAM 5/6
	{  52,  108 },	//  8: BPSK
	{ 104,  216 },	//  9: QPSK 1/2
	{ 156,  324 },	// 10: QPSK 3/4
	{ 208,  432 },	// 11: 16-QAM 1/2
	{ 312,  648 },	// 12: 16-QAM 3/4
	{ 416,  864 },	// 13: 64-QAM 2/3
	{ 468,  972 },	// 14: 64-QAM 3/4
	{ 520, 1080 },	// 15: 64-QAM 5/6
};
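
/*
 * Rows are indexed by MCS (see HT_RC_2_MCS): rows 0-7 are the
 * single-stream rates, rows 8-15 their two-stream counterparts; column 0
 * holds the 20 MHz value and column 1 the 40 MHz value. For example,
 * MCS 5 at 40 MHz carries bits_per_symbol[5][1] = 432 data bits per 4 us
 * symbol.
 */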
void owltgt_tx_processq(struct ath_softc_tgt *sc, struct ath_txq *txq,
			owl_txq_state_t txqstate);
static void ath_tgt_txqaddbuf(struct ath_softc_tgt *sc, struct ath_txq *txq,
			      struct ath_buf *bf, struct ath_desc *lastds);
void ath_rate_findrate_11n_Hardcoded(struct ath_softc_tgt *sc,
				     struct ath_rc_series series[]);
void ath_buf_set_rate_Hardcoded(struct ath_softc_tgt *sc,
				struct ath_tx_buf *bf);
static a_int32_t ath_tgt_txbuf_setup(struct ath_softc_tgt *sc,
				     struct ath_tx_buf *bf, ath_data_hdr_t *dh);
static void ath_tx_freebuf(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
static void ath_tx_uc_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
static void ath_update_stats(struct ath_softc_tgt *sc, struct ath_buf *bf);
void adf_print_buf(adf_nbuf_t buf);
static void ath_tgt_tx_enqueue(struct ath_txq *txq, struct ath_atx_tid *tid);

struct ath_buf *ath_tgt_tx_prepare(struct ath_softc_tgt *sc,
				   adf_nbuf_t skb, ath_data_hdr_t *dh);
void ath_tgt_tx_comp_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
struct ieee80211_frame *ATH_SKB_2_WH(adf_nbuf_t skb);

void ath_tgt_tx_send_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);

static void ath_tgt_tx_sched_normal(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
static void ath_tgt_tx_sched_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);

static struct ath_node_target *owltarget_findnode(struct tx_frame_header *dh,
						  struct ath_softc_tgt *sc,
						  adf_nbuf_t skb);
extern a_int32_t ath_chainmask_sel_logic(void *);
static a_int32_t ath_get_pktlen(struct ath_buf *bf, a_int32_t hdrlen);
static void ath_tgt_txq_schedule(struct ath_softc *sc, struct ath_txq *txq);

typedef void (*ath_ft_set_atype_t)(struct ath_softc_tgt *sc, struct ath_buf *bf);

static void
ath_tx_set_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);

static void
ath_bar_tx(struct ath_softc_tgt *sc, ath_atx_tid_t *tid, struct ath_tx_buf *bf);

static void
ath_tx_update_baw(ath_atx_tid_t *tid, int seqno);

static void
ath_tx_retry_subframe(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
		      ath_bufhead *bf_q, struct ath_tx_buf **bar);

static void
ath_tx_comp_aggr_error(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
		       ath_atx_tid_t *tid);

void ath_tx_addto_baw(ath_atx_tid_t *tid, struct ath_tx_buf *bf);
static inline void ath_tx_retry_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
static void ath_tx_comp_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
static void ath_update_aggr_stats(struct ath_softc_tgt *sc, struct ath_tx_desc *ds,
				  int nframes, int nbad);
static inline void ath_aggr_resume_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
static void ath_tx_comp_cleanup(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);

int ath_tgt_tx_form_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid,
			 ath_bufhead *bf_q);
int ath_tgt_tx_add_to_aggr(struct ath_softc_tgt *sc,
			   struct ath_buf *bf, int datatype,
			   ath_atx_tid_t *tid, int is_burst);
struct ieee80211_frame *ATH_SKB_2_WH(adf_nbuf_t skb)
{
	a_uint8_t *anbdata;
	a_uint32_t anblen;

	adf_nbuf_peek_header(skb, &anbdata, &anblen);

	return ((struct ieee80211_frame *)anbdata);
}
#undef adf_os_cpu_to_le16

static a_uint16_t adf_os_cpu_to_le16(a_uint16_t x)
{
	return ((((x) & 0xff00) >> 8) | (((x) & 0x00ff) << 8));
}
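
/*
 * The helper byte-swaps unconditionally, which assumes a big-endian
 * target CPU. Example: x = 0x1234 yields (0x0012 | 0x3400) = 0x3412,
 * the little-endian byte order the 802.11 header requires on air.
 */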
static inline void
ath_aggr_resume_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
{
	struct ath_txq *txq;

	txq = TID_TO_ACTXQ(tid->tidno);
	tid->paused = 0;

	if (asf_tailq_empty(&tid->buf_q))
		return;

	ath_tgt_tx_enqueue(txq, tid);
	ath_tgt_txq_schedule(sc, txq);
}

static void
ath_aggr_pause_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
{
	tid->paused = 1;
}
static a_uint32_t ath_pkt_duration(struct ath_softc_tgt *sc,
				   a_uint8_t rix, struct ath_tx_buf *bf,
				   a_int32_t width, a_int32_t half_gi)
{
	const HAL_RATE_TABLE *rt = sc->sc_currates;
	a_uint32_t nbits, nsymbits, duration, nsymbols;
	a_uint8_t rc;
	a_int32_t streams;
	a_int32_t pktlen;

	pktlen = bf->bf_isaggr ? bf->bf_al : bf->bf_pktlen;
	rc = rt->info[rix].rateCode;

	if (!(rc & 0x80))	/* legacy (non-HT) rate */
		return ath_hal_computetxtime(sc->sc_ah, rt, pktlen, rix,
					     bf->bf_shpream);

	nbits = (pktlen << 3) + OFDM_PLCP_BITS;
	nsymbits = bits_per_symbol[HT_RC_2_MCS(rc)][width];
	nsymbols = (nbits + nsymbits - 1) / nsymbits;

	if (!half_gi)
		duration = SYMBOL_TIME(nsymbols);
	else
		duration = SYMBOL_TIME_HALFGI(nsymbols);

	streams = HT_RC_2_STREAMS(rc);
	duration += L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(streams);

	return duration;
}
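
/*
 * Worked example (illustrative numbers, single stream): pktlen = 1500
 * bytes at MCS 5, 40 MHz, full GI gives nbits = 1500*8 + 22 = 12022,
 * nsymbits = bits_per_symbol[5][1] = 432, nsymbols = ceil(12022/432) = 28,
 * so duration = SYMBOL_TIME(28) = 112 us plus the fixed HT preamble
 * L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(1) = 36 us.
 */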
static void ath_dma_map(struct ath_softc_tgt *sc, struct ath_buf *bf)
{
	adf_nbuf_t skb = bf->bf_skb;

	skb = adf_nbuf_queue_first(&bf->bf_skbhead);
	adf_nbuf_map(sc->sc_dev, bf->bf_dmamap, skb, ADF_OS_DMA_TO_DEVICE);
}

static void ath_dma_unmap(struct ath_softc_tgt *sc, struct ath_buf *bf)
{
	adf_nbuf_t skb = bf->bf_skb;

	skb = adf_nbuf_queue_first(&bf->bf_skbhead);
	adf_nbuf_unmap(sc->sc_dev, bf->bf_dmamap, ADF_OS_DMA_TO_DEVICE);
}
static void ath_filltxdesc(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_desc *ds0, *ds = bf->bf_desc;
	a_int32_t i;

	ds0 = ds;
	adf_nbuf_dmamap_info(bf->bf_dmamap, &bf->bf_dmamap_info);

	for (i = 0; i < bf->bf_dmamap_info.nsegs; i++, ds++) {

		ds->ds_data = bf->bf_dmamap_info.dma_segs[i].paddr;

		if (i == (bf->bf_dmamap_info.nsegs - 1)) {
			ds->ds_link = 0;
			bf->bf_lastds = ds;
		} else
			ds->ds_link = ATH_BUF_GET_DESC_PHY_ADDR_WITH_IDX(bf, i+1);

		ath_hal_filltxdesc(sc->sc_ah, ds
				   , bf->bf_dmamap_info.dma_segs[i].len
				   , i == 0
				   , i == (bf->bf_dmamap_info.nsegs - 1)
				   , ds0);
	}
}
static void ath_tx_tgt_setds(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_desc *ds0, *ds = bf->bf_desc;
	adf_nbuf_queue_t skbhead;
	a_int32_t i, dscnt = 0;

	switch (bf->bf_protmode) {
	case IEEE80211_PROT_RTSCTS:
		bf->bf_flags |= HAL_TXDESC_RTSENA;
		break;
	case IEEE80211_PROT_CTSONLY:
		bf->bf_flags |= HAL_TXDESC_CTSENA;
		break;
	default:
		break;
	}

	ath_hal_set11n_txdesc(sc->sc_ah, ds
			      , bf->bf_pktlen		/* packet length */
			      , bf->bf_atype		/* Atheros packet type */
			      , 60			/* txpower */
			      , bf->bf_keyix		/* key cache index */
			      , bf->bf_keytype		/* key type */
			      , bf->bf_flags | HAL_TXDESC_INTREQ);

	ath_filltxdesc(sc, bf);
}
static struct ath_buf *ath_buf_toggle(struct ath_softc_tgt *sc,
				      struct ath_tx_buf *bf,
				      a_uint8_t retry_tx)
{
	struct ath_tx_buf *tmp = NULL;
	adf_nbuf_t buf = NULL;

	adf_os_assert(sc->sc_txbuf_held != NULL);

	tmp = sc->sc_txbuf_held;

	if (retry_tx) {
		ath_dma_unmap(sc, bf);
		adf_nbuf_queue_init(&tmp->bf_skbhead);
		buf = adf_nbuf_queue_remove(&bf->bf_skbhead);
		adf_nbuf_queue_add(&tmp->bf_skbhead, buf);
	}

	adf_os_assert(adf_nbuf_queue_len(&bf->bf_skbhead) == 0);

	tmp->bf_next = bf->bf_next;
	tmp->bf_endpt = bf->bf_endpt;
	tmp->bf_tidno = bf->bf_tidno;
	tmp->bf_skb = bf->bf_skb;
	tmp->bf_node = bf->bf_node;
	tmp->bf_isaggr = bf->bf_isaggr;
	tmp->bf_flags = bf->bf_flags;
	tmp->bf_state = bf->bf_state;
	tmp->bf_retries = bf->bf_retries;
	tmp->bf_comp = bf->bf_comp;
	tmp->bf_nframes = bf->bf_nframes;
	tmp->bf_cookie = bf->bf_cookie;

	if (retry_tx) {
		ath_dma_map(sc, tmp);
		ath_tx_tgt_setds(sc, tmp);
	}

	sc->sc_txbuf_held = bf;

	return tmp;
}
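
/*
 * Design note: sc_txbuf_held is a spare buffer kept aside so that a frame
 * can be moved onto a fresh descriptor chain without an allocation; the
 * held buffer adopts the skb queue and tx state, and the exhausted buffer
 * becomes the new spare. Usage as seen at the call sites in this file:
 *
 *	bf = ath_buf_toggle(sc, bf, 1);	// retry path: remap and rebuild descriptors
 *	bf = ath_buf_toggle(sc, bf, 0);	// completion path: only swap ownership
 */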
static void ath_tgt_skb_free(struct ath_softc_tgt *sc,
			     adf_nbuf_queue_t *head,
			     HTC_ENDPOINT_ID endpt)
{
	adf_nbuf_t tskb;

	while (adf_nbuf_queue_len(head) != 0) {
		tskb = adf_nbuf_queue_remove(head);
		ath_free_tx_skb(sc->tgt_htc_handle, endpt, tskb);
	}
}

static void ath_buf_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	ath_dma_unmap(sc, bf);
	ath_tgt_skb_free(sc, &bf->bf_skbhead, bf->bf_endpt);

	bf = ath_buf_toggle(sc, bf, 0);

	asf_tailq_insert_tail(&sc->sc_txbuf, bf, bf_list);
}
static void ath_buf_set_rate(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_hal *ah = sc->sc_ah;
	const HAL_RATE_TABLE *rt;
	struct ath_desc *ds = bf->bf_desc;
	HAL_11N_RATE_SERIES series[4];
	a_int32_t i, flags;
	a_uint8_t rix, cix, rtsctsrate;
	a_uint32_t aggr_limit_with_rts;
	a_uint32_t ctsduration = 0;
	a_int32_t prot_mode = AH_FALSE;

	rt = sc->sc_currates;
	rix = bf->bf_rcs[0].rix;
	flags = (bf->bf_flags & (HAL_TXDESC_RTSENA | HAL_TXDESC_CTSENA));
	cix = rt->info[sc->sc_protrix].controlRate;

	if (bf->bf_protmode != IEEE80211_PROT_NONE &&
	    (rt->info[rix].phy == IEEE80211_T_OFDM ||
	     rt->info[rix].phy == IEEE80211_T_HT) &&
	    (bf->bf_flags & HAL_TXDESC_NOACK) == 0) {
		cix = rt->info[sc->sc_protrix].controlRate;
		prot_mode = AH_TRUE;
	} else {
		if (ath_hal_htsupported(ah) && (!bf->bf_ismcast))
			flags = HAL_TXDESC_RTSENA;

		for (i = 4; i--;) {
			if (bf->bf_rcs[i].tries) {
				cix = rt->info[bf->bf_rcs[i].rix].controlRate;
				break;
			}
		}
	}

	ath_hal_getrtsaggrlimit(sc->sc_ah, &aggr_limit_with_rts);

	if (bf->bf_isaggr && aggr_limit_with_rts &&
	    bf->bf_al > aggr_limit_with_rts) {
		flags &= ~(HAL_TXDESC_RTSENA);
	}

	adf_os_mem_set(series, 0, sizeof(HAL_11N_RATE_SERIES) * 4);

	for (i = 0; i < 4; i++) {
		if (!bf->bf_rcs[i].tries)
			continue;

		rix = bf->bf_rcs[i].rix;

		series[i].Rate = rt->info[rix].rateCode |
			(bf->bf_shpream ? rt->info[rix].shortPreamble : 0);

		series[i].Tries = bf->bf_rcs[i].tries;

#ifdef MAGPIE_MERLIN
		series[i].RateFlags = ((bf->bf_rcs[i].flags & ATH_RC_RTSCTS_FLAG) ?
				       HAL_RATESERIES_RTS_CTS : 0) |
			((bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) ?
			 HAL_RATESERIES_2040 : 0) |
			((bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG) ?
			 HAL_RATESERIES_HALFGI : 0) |
			((bf->bf_rcs[i].flags & ATH_RC_TX_STBC_FLAG) ?
			 HAL_RATESERIES_STBC : 0);
#else
		series[i].RateFlags = ((bf->bf_rcs[i].flags & ATH_RC_RTSCTS_FLAG) ?
				       HAL_RATESERIES_RTS_CTS : 0) |
			((bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) ?
			 HAL_RATESERIES_2040 : 0) |
			((bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG) ?
			 HAL_RATESERIES_HALFGI : 0);
#endif

		series[i].PktDuration = ath_pkt_duration(sc, rix, bf,
				(bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) != 0,
				(bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG));

		series[i].ChSel = sc->sc_ic.ic_tx_chainmask;

		if (prot_mode)
			series[i].RateFlags |= HAL_RATESERIES_RTS_CTS;

		if (bf->bf_rcs[i].flags & ATH_RC_DS_FLAG)
			series[i].RateFlags |= HAL_RATESERIES_RTS_CTS;
	}

	rtsctsrate = rt->info[cix].rateCode |
		(bf->bf_shpream ? rt->info[cix].shortPreamble : 0);

	ath_hal_set11n_ratescenario(ah, ds, 1,
				    rtsctsrate, ctsduration,
				    series, 4, 0);
}
static void ath_tgt_rate_findrate(struct ath_softc_tgt *sc,
				  struct ath_node_target *an,
				  a_int32_t shortPreamble,
				  a_int32_t frameLen,
				  a_int32_t numTries,
				  a_int32_t numRates,
				  a_int32_t stepDnInc,
				  a_uint32_t rcflag,
				  struct ath_rc_series series[],
				  a_int32_t *isProbe)
{
	ath_rate_findrate(sc, an, 1, frameLen, 10, 4, 1,
			  ATH_RC_PROBE_ALLOWED, series, isProbe);
}
static void owl_tgt_tid_init(struct ath_atx_tid *tid)
{
	int i;

	tid->seq_start = tid->seq_next = 0;
	tid->baw_size = WME_MAX_BA;
	tid->baw_head = tid->baw_tail = 0;
	tid->paused = 0;
	tid->flag = 0;
	tid->sched = AH_FALSE;

	asf_tailq_init(&tid->buf_q);

	for (i = 0; i < ATH_TID_MAX_BUFS; i++) {
		TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, i);
	}
}
static void owl_tgt_tid_cleanup(struct ath_softc_tgt *sc,
				struct ath_atx_tid *tid)
{
	tid->incomp--;

	if (tid->incomp)
		return;

	tid->flag &= ~TID_CLEANUP_INPROGRES;

	if (tid->flag & TID_REINITIALIZE) {
		adf_os_print("TID REINIT DONE for tid %p\n", tid);
		tid->flag &= ~TID_REINITIALIZE;
		owl_tgt_tid_init(tid);
	} else {
		ath_aggr_resume_tid(sc, tid);
	}
}
void owl_tgt_node_init(struct ath_node_target *an)
{
	struct ath_atx_tid *tid;
	int tidno;

	for (tidno = 0, tid = &an->tid[tidno]; tidno < WME_NUM_TID;
	     tidno++, tid++) {

		tid->tidno = tidno;
		tid->an = an;

		if (tid->flag & TID_CLEANUP_INPROGRES) {
			tid->flag |= TID_REINITIALIZE;
			adf_os_print("tid[%p]->incomp is not 0: %d\n",
				     tid, tid->incomp);
		} else {
			owl_tgt_tid_init(tid);
		}
	}
}
void ath_tx_status_clear(struct ath_softc_tgt *sc)
{
	int i;

	for (i = 0; i < 2; i++) {
		sc->tx_status[i].cnt = 0;
	}
}

WMI_TXSTATUS_EVENT *ath_tx_status_get(struct ath_softc_tgt *sc)
{
	WMI_TXSTATUS_EVENT *txs = NULL;
	int i;

	for (i = 0; i < 2; i++) {
		if (sc->tx_status[i].cnt < HTC_MAX_TX_STATUS) {
			txs = &sc->tx_status[i];
			break;
		}
	}

	return txs;
}
void ath_tx_status_update(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_tx_desc *ds = bf->bf_lastds;
	WMI_TXSTATUS_EVENT *txs;

	if (sc->sc_tx_draining)
		return;

	txs = ath_tx_status_get(sc);
	if (txs == NULL)
		return;

	txs->txstatus[txs->cnt].cookie = bf->bf_cookie;
	txs->txstatus[txs->cnt].ts_rate = SM(bf->bf_endpt, ATH9K_HTC_TXSTAT_EPID);

	if (ds->ds_txstat.ts_status & HAL_TXERR_FILT)
		txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_FILT;

	if (!(ds->ds_txstat.ts_status & HAL_TXERR_XRETRY) &&
	    !(ds->ds_txstat.ts_status & HAL_TXERR_FIFO) &&
	    !(ds->ds_txstat.ts_status & HAL_TXERR_TIMER_EXPIRED) &&
	    !(ds->ds_txstat.ts_status & HAL_TXERR_FILT))
		txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_ACK;

	ath_tx_status_update_rate(sc, bf->bf_rcs, ds->ds_txstat.ts_rate, txs);

	txs->cnt++;
}
void ath_tx_status_update_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
			       struct ath_tx_desc *ds, struct ath_rc_series rcs[],
			       int txok)
{
	WMI_TXSTATUS_EVENT *txs;

	if (sc->sc_tx_draining)
		return;

	txs = ath_tx_status_get(sc);
	if (txs == NULL)
		return;

	txs->txstatus[txs->cnt].cookie = bf->bf_cookie;
	txs->txstatus[txs->cnt].ts_rate = SM(bf->bf_endpt, ATH9K_HTC_TXSTAT_EPID);

	if (txok)
		txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_ACK;

	if (rcs)
		ath_tx_status_update_rate(sc, rcs, ds->ds_txstat.ts_rate, txs);

	txs->cnt++;
}
void ath_tx_status_send(struct ath_softc_tgt *sc)
{
	int i;

	if (sc->sc_tx_draining)
		return;

	for (i = 0; i < 2; i++) {
		if (sc->tx_status[i].cnt) {
			wmi_event(sc->tgt_wmi_handle, WMI_TXSTATUS_EVENTID,
				  &sc->tx_status[i], sizeof(WMI_TXSTATUS_EVENT));
			/* FIXME: Handle failures. */
			sc->tx_status[i].cnt = 0;
		}
	}
}
static void owltgt_tx_process_cabq(struct ath_softc_tgt *sc, struct ath_txq *txq)
{
	ath_hal_intrset(sc->sc_ah, sc->sc_imask & ~HAL_INT_SWBA);
	owltgt_tx_processq(sc, txq, OWL_TXQ_ACTIVE);
	ath_hal_intrset(sc->sc_ah, sc->sc_imask);
}
void owl_tgt_tx_tasklet(TQUEUE_ARG data)
{
	struct ath_softc_tgt *sc = (struct ath_softc_tgt *)data;
	a_int32_t i;
	a_uint32_t qcumask = ((1 << HAL_NUM_TX_QUEUES) - 1);
	struct ath_txq *txq;

	ath_tx_status_clear(sc);

	for (i = 0; i < (HAL_NUM_TX_QUEUES - 6); i++) {
		txq = ATH_TXQ(sc, i);

		if (ATH_TXQ_SETUP(sc, i)) {
			if (txq == sc->sc_cabq)
				owltgt_tx_process_cabq(sc, txq);
			else
				owltgt_tx_processq(sc, txq, OWL_TXQ_ACTIVE);
		}
	}

	ath_tx_status_send(sc);
}
void owltgt_tx_processq(struct ath_softc_tgt *sc, struct ath_txq *txq,
			owl_txq_state_t txqstate)
{
	struct ath_tx_buf *bf;
	struct ath_tx_desc *ds;
	HAL_STATUS status;

	for (;;) {
		if (asf_tailq_empty(&txq->axq_q)) {
			txq->axq_link = NULL;
			txq->axq_linkbuf = NULL;
			break;
		}

		bf = asf_tailq_first(&txq->axq_q);

		ds = bf->bf_lastds;
		status = ath_hal_txprocdesc(sc->sc_ah, ds);

		if (status == HAL_EINPROGRESS) {
			if (txqstate == OWL_TXQ_ACTIVE)
				break;
			else if (txqstate == OWL_TXQ_STOPPED) {
				__stats(sc, tx_stopfiltered);
				ds->ds_txstat.ts_flags = 0;
				ds->ds_txstat.ts_status = HAL_OK;
			} else {
				ds->ds_txstat.ts_flags = HAL_TX_SW_FILTERED;
			}
		}

		ATH_TXQ_REMOVE_HEAD(txq, bf, bf_list);
		if ((asf_tailq_empty(&txq->axq_q))) {
			__stats(sc, tx_qnull);
			txq->axq_link = NULL;
			txq->axq_linkbuf = NULL;
		}

		if (bf->bf_comp) {
			bf->bf_comp(sc, bf);
		} else {
			ath_tx_status_update(sc, bf);
			ath_buf_comp(sc, bf);
		}

		if (txqstate == OWL_TXQ_ACTIVE) {
			ath_tgt_txq_schedule(sc, txq);
		}
	}
}
static struct ieee80211_frame *ATH_SKB2_WH(adf_nbuf_t skb)
{
	a_uint8_t *anbdata;
	a_uint32_t anblen;

	adf_nbuf_peek_header(skb, &anbdata, &anblen);

	return ((struct ieee80211_frame *)anbdata);
}
void
ath_tgt_tid_drain(struct ath_softc_tgt *sc, struct ath_atx_tid *tid)
{
	struct ath_tx_buf *bf;

	while (!asf_tailq_empty(&tid->buf_q)) {
		TAILQ_DEQ(&tid->buf_q, bf, bf_list);
		ath_tx_freebuf(sc, bf);
	}

	tid->seq_next = tid->seq_start;
	tid->baw_tail = tid->baw_head;
}
static void ath_tgt_tx_comp_normal(struct ath_softc_tgt *sc,
				   struct ath_tx_buf *bf)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	struct ath_desc *ds = bf->bf_lastds;
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);

	if (tid->flag & TID_CLEANUP_INPROGRES) {
		owl_tgt_tid_cleanup(sc, tid);
	}

	if (!bf->bf_isaggr) {
		ath_tx_uc_comp(sc, bf);
	}

	ath_tx_freebuf(sc, bf);
}
static struct ieee80211_node_target *ath_tgt_find_node(struct ath_softc_tgt *sc,
							a_int32_t node_index)
{
	struct ath_node_target *an;
	struct ieee80211_node_target *ni;

	if (node_index > TARGET_NODE_MAX)
		return NULL;

	an = &sc->sc_sta[node_index];
	ni = &an->ni;

	if (ni->ni_vap == NULL) {
		return NULL;
	}

	return ni;
}
static struct ath_buf *ath_buf_alloc(struct ath_softc_tgt *sc)
{
	struct ath_tx_buf *bf = NULL;

	bf = asf_tailq_first(&sc->sc_txbuf);
	if (bf != NULL) {
		adf_os_mem_set(&bf->bf_state, 0, sizeof(struct ath_buf_state));
		asf_tailq_remove(&sc->sc_txbuf, bf, bf_list);
	}

	return bf;
}
struct ath_buf *ath_tgt_tx_prepare(struct ath_softc_tgt *sc,
				   adf_nbuf_t skb, ath_data_hdr_t *dh)
{
	struct ath_tx_buf *bf;
	struct ieee80211_node_target *ni;
	a_uint32_t flags = adf_os_ntohl(dh->flags);
	struct ath_atx_tid *tid;

	ni = ath_tgt_find_node(sc, dh->ni_index);
	if (!ni)
		return NULL;

	tid = ATH_AN_2_TID(ATH_NODE_TARGET(ni), dh->tidno);
	if (tid->flag & TID_REINITIALIZE) {
		adf_os_print("drop frame due to TID reinit\n");
		return NULL;
	}

	bf = ath_buf_alloc(sc);
	if (!bf) {
		__stats(sc, tx_nobufs);
		return NULL;
	}

	bf->bf_tidno = dh->tidno;
	bf->bf_txq = TID_TO_ACTXQ(bf->bf_tidno);
	bf->bf_keytype = dh->keytype;
	bf->bf_keyix = dh->keyix;
	bf->bf_protmode = dh->flags & (IEEE80211_PROT_RTSCTS | IEEE80211_PROT_CTSONLY);
	bf->bf_node = (struct ath_node_target *)ni;

	adf_nbuf_queue_add(&bf->bf_skbhead, skb);
	skb = adf_nbuf_queue_first(&(bf->bf_skbhead));

	if (adf_nbuf_queue_len(&(bf->bf_skbhead)) == 0) {
		__stats(sc, tx_noskbs);
		return NULL;
	}

	bf->bf_skb = skb;

	ath_tgt_txbuf_setup(sc, bf, dh);
	ath_dma_map(sc, bf);
	ath_tx_tgt_setds(sc, bf);

	return bf;
}
static void ath_tgt_tx_seqno_normal(struct ath_tx_buf *bf)
{
	struct ieee80211_node_target *ni = bf->bf_node;
	struct ath_node_target *an = ATH_NODE_TARGET(ni);
	struct ieee80211_frame *wh = ATH_SKB_2_WH(bf->bf_skb);
	struct ath_atx_tid *tid = ATH_AN_2_TID(an, bf->bf_tidno);

	u_int8_t fragno = (wh->i_seq[0] & 0xf);

	INCR(ni->ni_txseqmgmt, IEEE80211_SEQ_MAX);

	bf->bf_seqno = (tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT);

	*(u_int16_t *)wh->i_seq = adf_os_cpu_to_le16(bf->bf_seqno);
	wh->i_seq[0] |= fragno;

	if (!(wh->i_fc[1] & IEEE80211_FC1_MORE_FRAG))
		INCR(tid->seq_next, IEEE80211_SEQ_MAX);
}
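
/*
 * Worked example (assumed values): with tid->seq_next = 0x123 and
 * fragno = 2, the stored field is (0x123 << IEEE80211_SEQ_SEQ_SHIFT) | 2
 * = 0x1232, written little-endian into wh->i_seq. seq_next only advances
 * on the last fragment, so every fragment of an MSDU shares one sequence
 * number.
 */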
static a_int32_t ath_key_setup(struct ieee80211_node_target *ni,
			       struct ath_tx_buf *bf)
{
	struct ieee80211_frame *wh = ATH_SKB_2_WH(bf->bf_skb);
	const struct ieee80211_cipher *cip;
	struct ieee80211_key *k;

	if (!(wh->i_fc[1] & IEEE80211_FC1_WEP)) {
		bf->bf_keytype = HAL_KEY_TYPE_CLEAR;
		bf->bf_keyix = HAL_TXKEYIX_INVALID;
		return 0;
	}

	switch (bf->bf_keytype) {
	case HAL_KEY_TYPE_WEP:
		bf->bf_pktlen += IEEE80211_WEP_ICVLEN;
		break;
	case HAL_KEY_TYPE_AES:
		bf->bf_pktlen += IEEE80211_WEP_MICLEN;
		break;
	case HAL_KEY_TYPE_TKIP:
		bf->bf_pktlen += IEEE80211_WEP_ICVLEN;
		break;
	default:
		break;
	}

	if (bf->bf_keytype == HAL_KEY_TYPE_AES ||
	    bf->bf_keytype == HAL_KEY_TYPE_TKIP)
		ieee80211_tgt_crypto_encap(wh, ni, bf->bf_keytype);

	return 0;
}
static void ath_tgt_txq_add_ucast(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_hal *ah = sc->sc_ah;
	struct ath_txq *txq;
	struct ath_node_target *an;
	HAL_STATUS status;
	static a_int32_t count = 0, i;
	volatile a_int32_t txe_val;

	txq = bf->bf_txq;

	status = ath_hal_txprocdesc(sc->sc_ah, bf->bf_lastds);

	ATH_TXQ_INSERT_TAIL(txq, bf, bf_list);

	if (txq->axq_link == NULL) {
		ath_hal_puttxbuf(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
	} else {
		*txq->axq_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);

		txe_val = OS_REG_READ(ah, 0x840);
		if (!(txe_val & (1 << txq->axq_qnum)))
			ath_hal_puttxbuf(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
	}

	txq->axq_link = &bf->bf_lastds->ds_link;
	ath_hal_txstart(ah, txq->axq_qnum);
}
static a_int32_t ath_tgt_txbuf_setup(struct ath_softc_tgt *sc,
				     struct ath_tx_buf *bf,
				     ath_data_hdr_t *dh)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	struct ieee80211_frame *wh = ATH_SKB2_WH(bf->bf_skb);
	struct ieee80211_node_target *ni = (struct ieee80211_node_target *)an;
	struct ieee80211vap_target *vap = ni->ni_vap;
	struct ieee80211com_target *ic = &sc->sc_ic;
	a_int32_t retval, fragno = 0;
	a_uint32_t flags = adf_os_ntohl(dh->flags);

	ath_tgt_tx_seqno_normal(bf);

	bf->bf_txq_add = ath_tgt_txq_add_ucast;
	bf->bf_hdrlen = ieee80211_anyhdrsize(wh);
	bf->bf_pktlen = ath_get_pktlen(bf, bf->bf_hdrlen);
	bf->bf_ismcast = IEEE80211_IS_MULTICAST(wh->i_addr1);

	if ((retval = ath_key_setup(bf->bf_node, bf)) < 0)
		return retval;

	if (flags & ATH_SHORT_PREAMBLE)
		bf->bf_shpream = AH_TRUE;
	else
		bf->bf_shpream = AH_FALSE;

	bf->bf_flags = HAL_TXDESC_CLRDMASK;
	bf->bf_atype = HAL_PKT_TYPE_NORMAL;

	return 0;
}
static a_int32_t
ath_get_pktlen(struct ath_buf *bf, a_int32_t hdrlen)
{
	adf_nbuf_t skb = bf->bf_skb;
	a_int32_t pktlen;

	skb = adf_nbuf_queue_first(&bf->bf_skbhead);
	pktlen = adf_nbuf_len(skb);

	pktlen -= (hdrlen & 3);
	pktlen += IEEE80211_CRC_LEN;

	return pktlen;
}
void
ath_tgt_tx_send_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_node_target *an = bf->bf_node;
	struct ath_rc_series rcs[4];
	struct ath_rc_series mrcs[4];
	a_int32_t shortPreamble = 0;
	a_int32_t isProbe = 0;

	adf_os_mem_set(rcs, 0, sizeof(struct ath_rc_series)*4);
	adf_os_mem_set(mrcs, 0, sizeof(struct ath_rc_series)*4);

	if (!bf->bf_ismcast) {
		ath_tgt_rate_findrate(sc, an, shortPreamble,
				      0, ATH_TXMAXTRY, 4, 1,
				      ATH_RC_PROBE_ALLOWED,
				      rcs, &isProbe);
		memcpy(bf->bf_rcs, rcs, sizeof(rcs));
	} else {
		mrcs[1].tries = mrcs[2].tries = mrcs[3].tries = 0;
		mrcs[1].rix = mrcs[2].rix = mrcs[3].rix = 0;
		mrcs[0].rix = 0;
		mrcs[0].tries = ATH_TXMAXTRY;
		mrcs[0].flags = 0;
		memcpy(bf->bf_rcs, mrcs, sizeof(mrcs));
	}

	ath_buf_set_rate(sc, bf);
	bf->bf_txq_add(sc, bf);
}
static void
ath_tx_freebuf(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	a_int32_t i;
	struct ath_desc *bfd = NULL;

	for (bfd = bf->bf_desc, i = 0; i < bf->bf_dmamap_info.nsegs; bfd++, i++) {
		ath_hal_clr11n_aggr(sc->sc_ah, bfd);
		ath_hal_set11n_burstduration(sc->sc_ah, bfd, 0);
		ath_hal_set11n_virtualmorefrag(sc->sc_ah, bfd, 0);
	}

	ath_dma_unmap(sc, bf);

	ath_tgt_skb_free(sc, &bf->bf_skbhead, bf->bf_endpt);

	bf = ath_buf_toggle(sc, bf, 0);

	bf->bf_isretried = 0;

	asf_tailq_insert_tail(&sc->sc_txbuf, bf, bf_list);
}
static void
ath_tx_uc_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	ath_tx_status_update(sc, bf);
	ath_update_stats(sc, bf);
	ath_rate_tx_complete(sc, ATH_NODE_TARGET(bf->bf_node),
			     bf->bf_lastds, bf->bf_rcs, 1, 0);
}
static void
ath_update_stats(struct ath_softc_tgt *sc, struct ath_buf *bf)
{
	struct ieee80211_node_target *ni = bf->bf_node;
	struct ath_tx_desc *ds = bf->bf_desc;
	struct ath_node_target *an = ATH_NODE_TARGET(ni);
	u_int32_t sr, lr;
	struct ieee80211_cb *cb;

	if (ds->ds_txstat.ts_status == 0) {
		if (ds->ds_txstat.ts_rate & HAL_TXSTAT_ALTRATE)
			sc->sc_tx_stats.ast_tx_altrate++;
	} else {
		if (ds->ds_txstat.ts_status & HAL_TXERR_XRETRY)
			sc->sc_tx_stats.ast_tx_xretries++;
		if (ds->ds_txstat.ts_status & HAL_TXERR_FIFO)
			sc->sc_tx_stats.ast_tx_fifoerr++;
		if (ds->ds_txstat.ts_status & HAL_TXERR_FILT)
			sc->sc_tx_stats.ast_tx_filtered++;
		if (ds->ds_txstat.ts_status & HAL_TXERR_TIMER_EXPIRED)
			sc->sc_tx_stats.ast_tx_timer_exp++;
	}

	sr = ds->ds_txstat.ts_shortretry;
	lr = ds->ds_txstat.ts_longretry;
	sc->sc_tx_stats.ast_tx_shortretry += sr;
	sc->sc_tx_stats.ast_tx_longretry += lr;
}
void
ath_tgt_send_mgt(struct ath_softc_tgt *sc, adf_nbuf_t hdr_buf, adf_nbuf_t skb,
		 HTC_ENDPOINT_ID endpt)
{
	struct ieee80211_node_target *ni;
	struct ieee80211vap_target *vap;
	struct ath_vap_target *avp;
	struct ath_hal *ah = sc->sc_ah;
	a_uint8_t rix, txrate, ctsrate, cix = 0xff, *data;
	a_uint32_t ivlen = 0, icvlen = 0, subtype, flags, ctsduration, fval;
	a_int32_t i, iswep, ismcast, hdrlen, pktlen, try0, len;
	struct ath_desc *ds = NULL, *ds0 = NULL;
	struct ath_txq *txq = NULL;
	struct ath_tx_buf *bf;
	HAL_PKT_TYPE atype;
	const HAL_RATE_TABLE *rt;
	HAL_BOOL shortPreamble;
	struct ieee80211_frame *wh;
	struct ath_rc_series rcs[4];
	HAL_11N_RATE_SERIES series[4];
	ath_mgt_hdr_t *mh;
	a_int32_t keyix;
	struct ieee80211com_target *ic = &sc->sc_ic;

	if (!hdr_buf) {
		adf_nbuf_peek_header(skb, &data, &len);
		adf_nbuf_pull_head(skb, sizeof(ath_mgt_hdr_t));
	} else {
		adf_nbuf_peek_header(hdr_buf, &data, &len);
	}

	adf_os_assert(len >= sizeof(ath_mgt_hdr_t));

	mh = (ath_mgt_hdr_t *)data;
	adf_nbuf_peek_header(skb, &data, &len);
	wh = (struct ieee80211_frame *)data;

	adf_os_mem_set(rcs, 0, sizeof(struct ath_rc_series)*4);
	adf_os_mem_set(series, 0, sizeof(HAL_11N_RATE_SERIES)*4);

	bf = asf_tailq_first(&sc->sc_txbuf);
	if (!bf)
		goto fail;

	asf_tailq_remove(&sc->sc_txbuf, bf, bf_list);

	ni = ath_tgt_find_node(sc, mh->ni_index);
	if (!ni)
		goto fail;

	bf->bf_endpt = endpt;
	bf->bf_cookie = mh->cookie;
	bf->bf_protmode = mh->flags & (IEEE80211_PROT_RTSCTS | IEEE80211_PROT_CTSONLY);
	txq = &sc->sc_txq[1];
	iswep = wh->i_fc[1] & IEEE80211_FC1_WEP;
	ismcast = IEEE80211_IS_MULTICAST(wh->i_addr1);
	hdrlen = ieee80211_anyhdrsize(wh);
	pktlen = len;
	keyix = HAL_TXKEYIX_INVALID;
	pktlen -= (hdrlen & 3);
	pktlen += IEEE80211_CRC_LEN;

	if (iswep)
		keyix = mh->keyix;

	adf_nbuf_map(sc->sc_dev, bf->bf_dmamap, skb, ADF_OS_DMA_TO_DEVICE);

	adf_nbuf_queue_add(&bf->bf_skbhead, skb);
	bf->bf_skb = skb;

	rt = sc->sc_currates;
	adf_os_assert(rt != NULL);

	if (mh->flags == ATH_SHORT_PREAMBLE)
		shortPreamble = AH_TRUE;
	else
		shortPreamble = AH_FALSE;

	flags = HAL_TXDESC_CLRDMASK;

	switch (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) {
	case IEEE80211_FC0_TYPE_MGT:
		subtype = wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK;

		if (subtype == IEEE80211_FC0_SUBTYPE_PROBE_RESP)
			atype = HAL_PKT_TYPE_PROBE_RESP;
		else if (subtype == IEEE80211_FC0_SUBTYPE_ATIM)
			atype = HAL_PKT_TYPE_ATIM;
		else
			atype = HAL_PKT_TYPE_NORMAL;
		break;
	default:
		atype = HAL_PKT_TYPE_NORMAL;
		break;
	}

	avp = &sc->sc_vap[mh->vap_index];

	rcs[0].rix = ath_get_minrateidx(sc, avp);
	rcs[0].tries = ATH_TXMAXTRY;
	rcs[0].flags = 0;

	adf_os_mem_copy(bf->bf_rcs, rcs, sizeof(rcs));
	rix = rcs[0].rix;
	try0 = rcs[0].tries;
	txrate = rt->info[rix].rateCode;

	if (shortPreamble)
		txrate |= rt->info[rix].shortPreamble;

	vap = ni->ni_vap;

	if (ismcast) {
		flags |= HAL_TXDESC_NOACK;
	} else if (pktlen > vap->iv_rtsthreshold) {
		flags |= HAL_TXDESC_RTSENA;
		cix = rt->info[rix].controlRate;
	}

	if ((bf->bf_protmode != IEEE80211_PROT_NONE) &&
	    rt->info[rix].phy == IEEE80211_T_OFDM &&
	    (flags & HAL_TXDESC_NOACK) == 0) {
		cix = rt->info[sc->sc_protrix].controlRate;
		sc->sc_tx_stats.ast_tx_protect++;
	}

	*(a_uint16_t *)&wh->i_seq[0] = adf_os_cpu_to_le16(ni->ni_txseqmgmt <<
							  IEEE80211_SEQ_SEQ_SHIFT);
	INCR(ni->ni_txseqmgmt, IEEE80211_SEQ_MAX);

	ctsduration = 0;

	if (flags & (HAL_TXDESC_RTSENA|HAL_TXDESC_CTSENA)) {
		adf_os_assert(cix != 0xff);
		ctsrate = rt->info[cix].rateCode;
		if (shortPreamble) {
			ctsrate |= rt->info[cix].shortPreamble;
			if (flags & HAL_TXDESC_RTSENA)		/* SIFS + CTS */
				ctsduration += rt->info[cix].spAckDuration;
			if ((flags & HAL_TXDESC_NOACK) == 0)	/* SIFS + ACK */
				ctsduration += rt->info[cix].spAckDuration;
		} else {
			if (flags & HAL_TXDESC_RTSENA)		/* SIFS + CTS */
				ctsduration += rt->info[cix].lpAckDuration;
			if ((flags & HAL_TXDESC_NOACK) == 0)	/* SIFS + ACK */
				ctsduration += rt->info[cix].lpAckDuration;
		}
		ctsduration += ath_hal_computetxtime(ah,
						     rt, pktlen, rix, shortPreamble);
	} else
		ctsrate = 0;

	flags |= HAL_TXDESC_INTREQ;

	ds = bf->bf_desc;
	ath_hal_setuptxdesc(ah, ds
			    , pktlen		/* packet length */
			    , hdrlen		/* header length */
			    , atype		/* Atheros packet type */
			    , 60		/* txpower */
			    , txrate		/* series 0 rate */
			    , try0		/* series 0 tries */
			    , keyix		/* key cache index */
			    , 0			/* antenna mode */
			    , flags		/* flags */
			    , ctsrate		/* rts/cts rate */
			    , ctsduration	/* rts/cts duration */
			    , icvlen
			    , ivlen
			    , ATH_COMP_PROC_NO_COMP_NO_CCS);

	bf->bf_flags = flags;

	/*
	 * Set key type in tx desc while sending the encrypted challenge to AP
	 * in Auth frame 3 of Shared Authentication, owl needs this.
	 */
	if (iswep && (keyix != HAL_TXKEYIX_INVALID) &&
	    (wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == IEEE80211_FC0_SUBTYPE_AUTH)
		ath_hal_fillkeytxdesc(ah, ds, mh->keytype);

	ath_filltxdesc(sc, bf);

	for (i = 0; i < 4; i++) {
		series[i].Tries = 2;
		series[i].Rate = txrate;
		series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
		series[i].RateFlags = 0;
	}

	ath_hal_set11n_ratescenario(ah, ds, 0, ctsrate, ctsduration, series, 4, 0);
	ath_tgt_txqaddbuf(sc, txq, bf, bf->bf_lastds);

	return;

fail:
	HTC_ReturnBuffers(sc->tgt_htc_handle, endpt, skb);
	return;
}
static void
ath_tgt_txqaddbuf(struct ath_softc_tgt *sc,
		  struct ath_txq *txq, struct ath_buf *bf,
		  struct ath_desc *lastds)
{
	struct ath_hal *ah = sc->sc_ah;

	ATH_TXQ_INSERT_TAIL(txq, bf, bf_list);

	if (txq->axq_link == NULL) {
		ath_hal_puttxbuf(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
	} else {
		*txq->axq_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
	}

	txq->axq_link = &lastds->ds_link;
	ath_hal_txstart(ah, txq->axq_qnum);
}
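
/*
 * Hardware queueing sketch: the first buffer on an idle queue is handed
 * to the QCU with ath_hal_puttxbuf(); subsequent buffers are appended by
 * patching the previous chain's ds_link word with the new descriptor's
 * physical address, so the DMA engine walks the list without further
 * register writes. axq_link always points at the ds_link of the last
 * descriptor queued.
 */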
void ath_tgt_handle_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	ath_atx_tid_t *tid;
	struct ath_node_target *an;
	struct ath_desc *ds;
	struct ath_txq *txq = bf->bf_txq;
	a_bool_t queue_frame;

	an = (struct ath_node_target *)bf->bf_node;
	adf_os_assert(an);

	tid = &an->tid[bf->bf_tidno];
	adf_os_assert(tid);

	bf->bf_comp = ath_tgt_tx_comp_normal;
	INCR(tid->seq_start, IEEE80211_SEQ_MAX);
	ath_tgt_tx_send_normal(sc, bf);
}
static void
ath_tgt_tx_enqueue(struct ath_txq *txq, struct ath_atx_tid *tid)
{
	if (tid->sched)
		return;

	tid->sched = AH_TRUE;
	asf_tailq_insert_tail(&txq->axq_tidq, tid, tid_qelem);
}
static void
ath_tgt_txq_schedule(struct ath_softc *sc, struct ath_txq *txq)
{
	struct ath_atx_tid *tid;
	u_int8_t bdone;
	struct ieee80211_node *ieee_node;
	u_int32_t aggr_limit_with_rts;

	bdone = AH_FALSE;

	do {
		TAILQ_DEQ(&txq->axq_tidq, tid, tid_qelem);

		if (tid == NULL) {
			bdone = AH_TRUE;
			break;
		}

		tid->sched = AH_FALSE;

		if (tid->paused)
			continue;

		if (!(tid->flag & TID_AGGR_ENABLED))
			ath_tgt_tx_sched_normal(sc, tid);
		else
			ath_tgt_tx_sched_aggr(sc, tid);

		if (!asf_tailq_empty(&tid->buf_q)) {
			ath_tgt_tx_enqueue(txq, tid);
		}

	} while (!asf_tailq_empty(&txq->axq_tidq) && !bdone);
}
void
ath_tgt_handle_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	ath_atx_tid_t *tid;
	struct ath_node_target *an;
	struct ath_desc *ds;
	struct ath_txq *txq = bf->bf_txq;
	a_bool_t queue_frame, within_baw;

	an = (struct ath_node_target *)bf->bf_node;
	adf_os_assert(an);

	tid = &an->tid[bf->bf_tidno];
	adf_os_assert(tid);

	bf->bf_comp = ath_tgt_tx_comp_aggr;

	within_baw = BAW_WITHIN(tid->seq_start, tid->baw_size,
				SEQNO_FROM_BF_SEQNO(bf->bf_seqno));

	queue_frame = ((txq->axq_depth >= ATH_AGGR_MIN_QDEPTH) ||
		       (!asf_tailq_empty(&tid->buf_q)) ||
		       (tid->paused) || (!within_baw));

	if (queue_frame) {
		asf_tailq_insert_tail(&tid->buf_q, bf, bf_list);
		ath_tgt_tx_enqueue(txq, tid);
	} else {
		ath_tx_addto_baw(tid, bf);
		__stats(sc, txaggr_nframes);
		ath_tgt_tx_send_normal(sc, bf);
	}
}
static void
ath_tgt_tx_sched_normal(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
{
	struct ath_tx_buf *bf;
	struct ath_txq *txq = TID_TO_ACTXQ(tid->tidno);

	do {
		if (asf_tailq_empty(&tid->buf_q))
			break;

		bf = asf_tailq_first(&tid->buf_q);
		asf_tailq_remove(&tid->buf_q, bf, bf_list);
		ath_tgt_tx_send_normal(sc, bf);

	} while (txq->axq_depth < ATH_AGGR_MIN_QDEPTH);
}
static void
ath_tgt_tx_sched_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
{
	struct ath_tx_buf *bf, *bf_last;
	ATH_AGGR_STATUS status;
	ath_bufhead bf_q;
	struct ath_txq *txq = TID_TO_ACTXQ(tid->tidno);
	struct ath_desc *ds = NULL;
	int i;

	if (asf_tailq_empty(&tid->buf_q))
		return;

	do {
		if (asf_tailq_empty(&tid->buf_q))
			break;

		asf_tailq_init(&bf_q);

		status = ath_tgt_tx_form_aggr(sc, tid, &bf_q);

		if (asf_tailq_empty(&bf_q))
			break;

		bf = asf_tailq_first(&bf_q);
		bf_last = asf_tailq_last(&bf_q, ath_bufhead_s);

		if (bf->bf_nframes == 1) {

			if (bf->bf_retries == 0)
				__stats(sc, txaggr_single);

			bf->bf_isaggr = 0;
			bf->bf_lastds = &(bf->bf_descarr[bf->bf_dmamap_info.nsegs - 1]);
			bf->bf_lastds->ds_link = 0;
			bf->bf_next = NULL;

			for (ds = bf->bf_desc; ds <= bf->bf_lastds; ds++)
				ath_hal_clr11n_aggr(sc->sc_ah, ds);

			ath_buf_set_rate(sc, bf);
			bf->bf_txq_add(sc, bf);

			continue;
		}

		bf->bf_isaggr = 1;

		bf_last->bf_next = NULL;
		bf_last->bf_lastds->ds_link = 0;
		bf_last->bf_ndelim = 0;

		ath_buf_set_rate(sc, bf);
		ath_hal_set11n_aggr_first(sc->sc_ah, bf->bf_desc, bf->bf_al,
					  bf->bf_ndelim);
		bf->bf_lastds = bf_last->bf_lastds;

		for (i = 0; i < bf_last->bf_dmamap_info.nsegs; i++)
			ath_hal_set11n_aggr_last(sc->sc_ah, &bf_last->bf_descarr[i]);

		if (status == ATH_AGGR_8K_LIMITED) {
			ath_tgt_tx_enqueue(txq, tid);
		}

		bf->bf_txq_add(sc, bf);

	} while (txq->axq_depth < ATH_AGGR_MIN_QDEPTH &&
		 status != ATH_TGT_AGGR_BAW_CLOSED);
}
static u_int32_t ath_lookup_rate(struct ath_softc_tgt *sc,
				 struct ath_node_target *an,
				 struct ath_tx_buf *bf)
{
	int i, prate = 0;
	u_int32_t max4msframelen, frame_length;
	u_int16_t aggr_limit, legacy = 0;
	const HAL_RATE_TABLE *rt = sc->sc_currates;
	struct ieee80211_node_target *ieee_node = (struct ieee80211_node_target *)an;

	if (bf->bf_ismcast) {
		bf->bf_rcs[1].tries = bf->bf_rcs[2].tries = bf->bf_rcs[3].tries = 0;
		bf->bf_rcs[0].rix = 0xb;
		bf->bf_rcs[0].tries = ATH_TXMAXTRY - 1;
		bf->bf_rcs[0].flags = 0;
	} else {
		ath_tgt_rate_findrate(sc, an, AH_TRUE, 0, ATH_TXMAXTRY-1, 4, 1,
				      ATH_RC_PROBE_ALLOWED, bf->bf_rcs, &prate);
	}

	max4msframelen = IEEE80211_AMPDU_LIMIT_MAX;

	for (i = 0; i < 4; i++) {
		if (bf->bf_rcs[i].tries) {
			frame_length = bf->bf_rcs[i].max4msframelen;

			if (rt->info[bf->bf_rcs[i].rix].phy != IEEE80211_T_HT) {
				legacy = 1;
				break;
			}

			max4msframelen = ATH_MIN(max4msframelen, frame_length);
		}
	}

	if (prate || legacy)
		return 0;

	if (sc->sc_ic.ic_enable_coex)
		aggr_limit = ATH_MIN((max4msframelen*3)/8, sc->sc_ic.ic_ampdu_limit);
	else
		aggr_limit = ATH_MIN(max4msframelen, sc->sc_ic.ic_ampdu_limit);

	if (ieee_node->ni_maxampdu)
		aggr_limit = ATH_MIN(aggr_limit, ieee_node->ni_maxampdu);

	return aggr_limit;
}
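
/*
 * Worked example (illustrative): with max4msframelen = 32768 bytes and
 * BT coex enabled, the limit becomes (32768 * 3) / 8 = 12288 bytes; it is
 * then clamped by ic_ampdu_limit and, when the peer advertised one, by
 * ni_maxampdu.
 */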
int ath_tgt_tx_form_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid,
			 ath_bufhead *bf_q)
{
	struct ath_tx_buf *bf_first, *bf_prev = NULL;
	int nframes = 0, rl = 0;
	struct ath_desc *ds = NULL;
	struct ath_tx_buf *bf;
	u_int16_t aggr_limit = (64*1024 - 1), al = 0, bpad = 0, al_delta;
	u_int16_t h_baw = tid->baw_size/2, prev_al = 0, prev_frames = 0;

	bf_first = asf_tailq_first(&tid->buf_q);

	do {
		bf = asf_tailq_first(&tid->buf_q);

		if (!BAW_WITHIN(tid->seq_start, tid->baw_size,
				SEQNO_FROM_BF_SEQNO(bf->bf_seqno))) {

			bf_first->bf_al = al;
			bf_first->bf_nframes = nframes;
			return ATH_TGT_AGGR_BAW_CLOSED;
		}

		if (!rl) {
			aggr_limit = ath_lookup_rate(sc, tid->an, bf);
			rl = 1;
		}

		al_delta = ATH_AGGR_DELIM_SZ + bf->bf_pktlen;

		if (nframes && (aggr_limit < (al + bpad + al_delta + prev_al))) {
			bf_first->bf_al = al;
			bf_first->bf_nframes = nframes;
			return ATH_TGT_AGGR_LIMITED;
		}

#ifdef PROJECT_K2
		if ((nframes + prev_frames) >= ATH_MIN((h_baw), 17)) {
#else
		if ((nframes + prev_frames) >= ATH_MIN((h_baw), 22)) {
#endif
			bf_first->bf_al = al;
			bf_first->bf_nframes = nframes;
			return ATH_TGT_AGGR_LIMITED;
		}

		ath_tx_addto_baw(tid, bf);
		asf_tailq_remove(&tid->buf_q, bf, bf_list);
		asf_tailq_insert_tail(bf_q, bf, bf_list);
		nframes++;

		adf_os_assert(bf->bf_comp == ath_tgt_tx_comp_aggr);

		al += bpad + al_delta;
		bf->bf_ndelim = ATH_AGGR_GET_NDELIM(bf->bf_pktlen);

		switch (bf->bf_keytype) {
		case HAL_KEY_TYPE_AES:
			bf->bf_ndelim += ATH_AGGR_ENCRYPTDELIM;
			break;
		case HAL_KEY_TYPE_WEP:
		case HAL_KEY_TYPE_TKIP:
			bf->bf_ndelim += 64;
			break;
		case HAL_KEY_TYPE_WAPI:
			bf->bf_ndelim += 12;
			break;
		default:
			break;
		}

		bpad = PADBYTES(al_delta) + (bf->bf_ndelim << 2);

		if (bf_prev) {
			bf_prev->bf_next = bf;
			bf_prev->bf_lastds->ds_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
		}
		bf_prev = bf;

		for (ds = bf->bf_desc; ds <= bf->bf_lastds; ds++)
			ath_hal_set11n_aggr_middle(sc->sc_ah, ds, bf->bf_ndelim);

	} while (!asf_tailq_empty(&tid->buf_q));

	bf_first->bf_al = al;
	bf_first->bf_nframes = nframes;

	return ATH_TGT_AGGR_DONE;
}
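
/*
 * Spacing example (assumed sizes): every subframe is padded to a 4-byte
 * boundary and preceded by bf_ndelim 4-byte MPDU delimiters, so a
 * subframe needing 2 delimiters adds bpad = PADBYTES(al_delta) + (2 << 2)
 * = PADBYTES(al_delta) + 8 bytes before the next subframe. Ciphers add
 * extra delimiters (ATH_AGGR_ENCRYPTDELIM for AES) to give the receiver's
 * crypto engine time between MPDUs.
 */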
void ath_tx_addto_baw(ath_atx_tid_t *tid, struct ath_tx_buf *bf)
{
	a_int32_t index, cindex;

	if (bf->bf_isretried) {
		return;
	}

	index = ATH_BA_INDEX(tid->seq_start, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
	cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1);

	TX_BUF_BITMAP_SET(tid->tx_buf_bitmap, cindex);

	if (index >= ((tid->baw_tail - tid->baw_head) & (ATH_TID_MAX_BUFS - 1))) {
		tid->baw_tail = cindex;
		INCR(tid->baw_tail, ATH_TID_MAX_BUFS);
	}
}
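
/*
 * Worked example (assumed state): with seq_start = 100, baw_head = 5 and
 * a frame with sequence number 103, index = ATH_BA_INDEX(100, 103) = 3
 * and cindex = (5 + 3) & (ATH_TID_MAX_BUFS - 1), i.e. 8 for any window
 * larger than 8 slots, so the window is tracked as a circular bitmap.
 * ath_tx_update_baw() clears the bit on completion and slides
 * seq_start/baw_head past any finished run.
 */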
void ath_tgt_tx_comp_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
	struct ath_tx_desc lastds;
	struct ath_tx_desc *ds = &lastds;
	struct ath_rc_series rcs[4];
	u_int16_t seq_st;
	u_int32_t *ba;
	int ba_index;
	int nbad = 0;
	int nframes = bf->bf_nframes;
	struct ath_buf *bf_next;
	ath_bufhead bf_q;
	int tx_ok = 1;
	struct ath_buf *bar = NULL;
	struct ath_txq *txq;

	txq = TID_TO_ACTXQ(tid->tidno);

	if (tid->flag & TID_CLEANUP_INPROGRES) {
		ath_tx_comp_cleanup(sc, bf);
		return;
	}

	adf_os_mem_copy(ds, bf->bf_lastds, sizeof(struct ath_tx_desc));
	adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));

	if (ds->ds_txstat.ts_flags == HAL_TX_SW_FILTERED) {
		return;
	}

	if (!bf->bf_isaggr) {
		ath_tx_comp_unaggr(sc, bf);
		return;
	}

	__stats(sc, tx_compaggr);

	asf_tailq_init(&bf_q);

	seq_st = ATH_DS_BA_SEQ(ds);
	ba = ATH_DS_BA_BITMAP(ds);
	tx_ok = (ATH_DS_TX_STATUS(ds) == HAL_OK);

	if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
		ath_tx_comp_aggr_error(sc, bf, tid);
		return;
	}

	if (tx_ok && !ATH_DS_TX_BA(ds)) {
		__stats(sc, txaggr_babug);
		adf_os_print("BA Bug?\n");
		ath_tx_comp_aggr_error(sc, bf, tid);
		return;
	}

	while (bf) {
		ba_index = ATH_BA_INDEX(seq_st, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
		bf_next = bf->bf_next;

		if (tx_ok && ATH_BA_ISSET(ba, ba_index)) {
			__stats(sc, txaggr_compgood);
			ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
			ath_tx_status_update_aggr(sc, bf, ds, rcs, 1);
			ath_tx_freebuf(sc, bf);
		} else {
			ath_tx_retry_subframe(sc, bf, &bf_q, &bar);
			nbad++;
		}

		bf = bf_next;
	}

	ath_update_aggr_stats(sc, ds, nframes, nbad);
	ath_rate_tx_complete(sc, an, ds, rcs, nframes, nbad);

	if (bar) {
		ath_bar_tx(sc, tid, bar);
	}

	if (!asf_tailq_empty(&bf_q)) {
		__stats(sc, txaggr_prepends);
		TAILQ_INSERTQ_HEAD(&tid->buf_q, &bf_q, bf_list);
		ath_tgt_tx_enqueue(txq, tid);
	}
}
static void
ath_tx_comp_aggr_error(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
		       ath_atx_tid_t *tid)
{
	struct ath_tx_desc lastds;
	struct ath_tx_desc *ds = &lastds;
	struct ath_rc_series rcs[4];
	struct ath_buf *bar = NULL;
	struct ath_buf *bf_next;
	int nframes = bf->bf_nframes;
	ath_bufhead bf_q;
	struct ath_txq *txq;

	asf_tailq_init(&bf_q);
	txq = TID_TO_ACTXQ(tid->tidno);

	adf_os_mem_copy(ds, bf->bf_lastds, sizeof(struct ath_tx_desc));
	adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));

	while (bf) {
		bf_next = bf->bf_next;
		ath_tx_retry_subframe(sc, bf, &bf_q, &bar);
		bf = bf_next;
	}

	ath_update_aggr_stats(sc, ds, nframes, nframes);
	ath_rate_tx_complete(sc, tid->an, ds, rcs, nframes, nframes);

	if (bar) {
		ath_bar_tx(sc, tid, bar);
	}

	if (!asf_tailq_empty(&bf_q)) {
		__stats(sc, txaggr_prepends);
		TAILQ_INSERTQ_HEAD(&tid->buf_q, &bf_q, bf_list);
		ath_tgt_tx_enqueue(txq, tid);
	}
}
static void
ath_tx_comp_cleanup(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
	struct ath_tx_desc lastds;
	struct ath_tx_desc *ds = &lastds;
	struct ath_rc_series rcs[4];
	u_int16_t seq_st;
	u_int32_t *ba;
	int ba_index;
	int nbad = 0;
	int nframes = bf->bf_nframes;
	struct ath_buf *bf_next;
	int tx_ok = 1;

	adf_os_mem_copy(ds, bf->bf_lastds, sizeof(struct ath_tx_desc));
	adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));

	seq_st = ATH_DS_BA_SEQ(ds);
	ba = ATH_DS_BA_BITMAP(ds);
	tx_ok = (ATH_DS_TX_STATUS(ds) == HAL_OK);

	if (!bf->bf_isaggr) {
		ath_update_stats(sc, bf);

		__stats(sc, tx_compunaggr);

		ath_tx_status_update(sc, bf);

		ath_tx_freebuf(sc, bf);

		if (tid->flag & TID_CLEANUP_INPROGRES) {
			owl_tgt_tid_cleanup(sc, tid);
		}

		return;
	}

	while (bf) {
		ba_index = ATH_BA_INDEX(seq_st, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
		bf_next = bf->bf_next;

		ath_tx_status_update_aggr(sc, bf, ds, rcs, 0);

		ath_tx_freebuf(sc, bf);

		tid->incomp--;
		if (!tid->incomp) {
			tid->flag &= ~TID_CLEANUP_INPROGRES;
			ath_aggr_resume_tid(sc, tid);
			break;
		}

		bf = bf_next;
	}

	ath_update_aggr_stats(sc, ds, nframes, nbad);
	ath_rate_tx_complete(sc, an, ds, rcs, nframes, nbad);
}
static void
ath_tx_retry_subframe(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
		      ath_bufhead *bf_q, struct ath_tx_buf **bar)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
	struct ath_desc *ds = NULL;
	int i = 0;

	__stats(sc, txaggr_compretries);

	for (ds = bf->bf_desc, i = 0; i < bf->bf_dmamap_info.nsegs; ds++, i++) {
		ath_hal_clr11n_aggr(sc->sc_ah, ds);
		ath_hal_set11n_burstduration(sc->sc_ah, ds, 0);
		ath_hal_set11n_virtualmorefrag(sc->sc_ah, ds, 0);
	}

	if (bf->bf_retries >= OWLMAX_RETRIES) {
		__stats(sc, txaggr_xretries);
		ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
		ath_tx_status_update_aggr(sc, bf, bf->bf_lastds, NULL, 0);

		if (!*bar)
			*bar = bf;
		else
			ath_tx_freebuf(sc, bf);
		return;
	}

	if (!bf->bf_next) {
		__stats(sc, txaggr_errlast);
		bf = ath_buf_toggle(sc, bf, 1);
	}

	bf->bf_lastds = &(bf->bf_descarr[bf->bf_dmamap_info.nsegs - 1]);

	ath_tx_set_retry(sc, bf);
	asf_tailq_insert_tail(bf_q, bf, bf_list);
}
static void
ath_update_aggr_stats(struct ath_softc_tgt *sc,
		      struct ath_tx_desc *ds, int nframes,
		      int nbad)
{
	u_int8_t status = ATH_DS_TX_STATUS(ds);
	u_int8_t txflags = ATH_DS_TX_FLAGS(ds);

	__statsn(sc, txaggr_longretries, ds->ds_txstat.ts_longretry);
	__statsn(sc, txaggr_shortretries, ds->ds_txstat.ts_shortretry);

	if (txflags & HAL_TX_DESC_CFG_ERR)
		__stats(sc, txaggr_desc_cfgerr);

	if (txflags & HAL_TX_DATA_UNDERRUN)
		__stats(sc, txaggr_data_urun);

	if (txflags & HAL_TX_DELIM_UNDERRUN)
		__stats(sc, txaggr_delim_urun);

	if (status & HAL_TXERR_XRETRY)
		__stats(sc, txaggr_compxretry);

	if (status & HAL_TXERR_FILT)
		__stats(sc, txaggr_filtered);

	if (status & HAL_TXERR_FIFO)
		__stats(sc, txaggr_fifo);

	if (status & HAL_TXERR_XTXOP)
		__stats(sc, txaggr_xtxop);

	if (status & HAL_TXERR_TIMER_EXPIRED)
		__stats(sc, txaggr_timer_exp);
}
static void
ath_tx_comp_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
	struct ath_desc *ds = bf->bf_lastds;

	ath_update_stats(sc, bf);
	ath_rate_tx_complete(sc, an, ds, bf->bf_rcs, 1, 0);

	if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
		ath_tx_retry_unaggr(sc, bf);
		return;
	}

	__stats(sc, tx_compunaggr);

	ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
	ath_tx_status_update(sc, bf);
	ath_tx_freebuf(sc, bf);
}
static inline void
ath_tx_retry_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
	struct ath_txq *txq;

	txq = TID_TO_ACTXQ(tid->tidno);

	if (bf->bf_retries >= OWLMAX_RETRIES) {
		__stats(sc, txunaggr_xretry);
		ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
		ath_tx_status_update(sc, bf);
		ath_bar_tx(sc, tid, bf);
		return;
	}

	__stats(sc, txunaggr_compretries);

	if (!bf->bf_lastds->ds_link) {
		__stats(sc, txunaggr_errlast);
		bf = ath_buf_toggle(sc, bf, 1);
	}

	ath_tx_set_retry(sc, bf);
	asf_tailq_insert_head(&tid->buf_q, bf, bf_list);
	ath_tgt_tx_enqueue(txq, tid);
}
static void
ath_tx_update_baw(ath_atx_tid_t *tid, int seqno)
{
	int index, cindex;

	index = ATH_BA_INDEX(tid->seq_start, seqno);
	cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1);

	TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, cindex);

	while (tid->baw_head != tid->baw_tail &&
	       (!TX_BUF_BITMAP_IS_SET(tid->tx_buf_bitmap, tid->baw_head))) {
		INCR(tid->seq_start, IEEE80211_SEQ_MAX);
		INCR(tid->baw_head, ATH_TID_MAX_BUFS);
	}
}
static void ath_tx_set_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ieee80211_frame *wh;

	__stats(sc, txaggr_retries);

	bf->bf_isretried = 1;
	bf->bf_retries++;

	wh = ATH_SKB_2_WH(bf->bf_skb);
	wh->i_fc[1] |= IEEE80211_FC1_RETRY;
}
void ath_tgt_tx_cleanup(struct ath_softc_tgt *sc, struct ath_node_target *an,
			ath_atx_tid_t *tid, a_uint8_t discard_all)
{
	struct ath_tx_buf *bf;
	struct ath_tx_buf *bf_next;
	struct ath_txq *txq;

	txq = TID_TO_ACTXQ(tid->tidno);

	bf = asf_tailq_first(&tid->buf_q);

	while (bf) {
		if (discard_all || bf->bf_isretried) {
			bf_next = asf_tailq_next(bf, bf_list);
			TAILQ_DEQ(&tid->buf_q, bf, bf_list);

			if (bf->bf_isretried)
				ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));

			ath_tx_freebuf(sc, bf);
			bf = bf_next;
			continue;
		}

		bf->bf_comp = ath_tgt_tx_comp_normal;
		bf = asf_tailq_next(bf, bf_list);
	}

	ath_aggr_pause_tid(sc, tid);

	while (tid->baw_head != tid->baw_tail) {
		if (TX_BUF_BITMAP_IS_SET(tid->tx_buf_bitmap, tid->baw_head)) {
			tid->incomp++;
			tid->flag |= TID_CLEANUP_INPROGRES;
			TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, tid->baw_head);
		}

		INCR(tid->baw_head, ATH_TID_MAX_BUFS);
		INCR(tid->seq_start, IEEE80211_SEQ_MAX);
	}

	if (!(tid->flag & TID_CLEANUP_INPROGRES)) {
		ath_aggr_resume_tid(sc, tid);
	}
}
/******************/
/* BAR Management */
/******************/
static void ath_tgt_delba_send(struct ath_softc_tgt *sc,
			       struct ieee80211_node_target *ni,
			       a_uint8_t tidno, a_uint8_t initiator,
			       a_uint16_t reasoncode)
{
	struct ath_node_target *an = ATH_NODE_TARGET(ni);
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, tidno);
	struct wmi_data_delba wmi_delba;

	tid->flag &= ~TID_AGGR_ENABLED;

	ath_tgt_tx_cleanup(sc, an, tid, 1);

	wmi_delba.ni_nodeindex = ni->ni_nodeindex;
	wmi_delba.tidno = tid->tidno;
	wmi_delba.initiator = 1;
	wmi_delba.reasoncode = IEEE80211_REASON_UNSPECIFIED;

	__stats(sc, txbar_xretry);
	wmi_event(sc->tgt_wmi_handle,
		  WMI_DELBA_EVENTID,
		  &wmi_delba, sizeof(wmi_delba));
}
static void ath_bar_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
	ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);

	if (bf->bf_retries >= OWLMAX_BAR_RETRIES) {
		ath_tgt_delba_send(sc, bf->bf_node, tid->tidno, 1,
				   IEEE80211_REASON_UNSPECIFIED);
		ath_tgt_tid_drain(sc, tid);

		bf->bf_comp = NULL;
		ath_buf_comp(sc, bf);
		return;
	}

	__stats(sc, txbar_compretries);

	if (!bf->bf_lastds->ds_link) {
		__stats(sc, txbar_errlast);
		bf = ath_buf_toggle(sc, bf, 1);
	}

	bf->bf_lastds->ds_link = 0;

	ath_tx_set_retry(sc, bf);
	ath_tgt_txq_add_ucast(sc, bf);
}
static void ath_bar_tx_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
{
	struct ath_desc *ds = bf->bf_lastds;
	struct ath_node_target *an;
	ath_atx_tid_t *tid;
	struct ath_txq *txq;

	an = (struct ath_node_target *)bf->bf_node;
	tid = &an->tid[bf->bf_tidno];
	txq = TID_TO_ACTXQ(tid->tidno);

	if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
		ath_bar_retry(sc, bf);
		return;
	}

	ath_aggr_resume_tid(sc, tid);

	bf->bf_comp = NULL;
	ath_buf_comp(sc, bf);
}
static void ath_bar_tx(struct ath_softc_tgt *sc,
		       ath_atx_tid_t *tid, struct ath_tx_buf *bf)
{
	adf_nbuf_t skb;
	struct ieee80211_frame_bar *bar;
	u_int8_t min_rate;
	struct ath_desc *ds, *ds0;
	HAL_11N_RATE_SERIES series[4];
	int i = 0;
	adf_nbuf_queue_t skbhead;
	a_uint8_t *anbdata;
	a_uint32_t anblen;

	__stats(sc, tx_bars);

	memset(&series, 0, sizeof(series));

	ath_aggr_pause_tid(sc, tid);

	skb = adf_nbuf_queue_remove(&bf->bf_skbhead);
	adf_nbuf_peek_header(skb, &anbdata, &anblen);
	adf_nbuf_trim_tail(skb, anblen);
	bar = (struct ieee80211_frame_bar *) anbdata;

	min_rate = 0x0b;	/* lowest mandatory rate code */

	ath_dma_unmap(sc, bf);
	adf_nbuf_queue_add(&bf->bf_skbhead, skb);

	bar->i_fc[1] = IEEE80211_FC1_DIR_NODS;
	bar->i_fc[0] = IEEE80211_FC0_VERSION_0 |
		IEEE80211_FC0_TYPE_CTL |
		IEEE80211_FC0_SUBTYPE_BAR;
	bar->i_ctl = tid->tidno << IEEE80211_BAR_CTL_TID_S |
		IEEE80211_BAR_CTL_COMBA;
	bar->i_seq = adf_os_cpu_to_le16(tid->seq_start << IEEE80211_SEQ_SEQ_SHIFT);

	bf->bf_seqno = tid->seq_start << IEEE80211_SEQ_SEQ_SHIFT;

	adf_nbuf_put_tail(skb, sizeof(struct ieee80211_frame_bar));

	bf->bf_comp = ath_bar_tx_comp;
	bf->bf_tidno = tid->tidno;
	bf->bf_node = &tid->an->ni;
	ath_dma_map(sc, bf);
	adf_nbuf_dmamap_info(bf->bf_dmamap, &bf->bf_dmamap_info);

	ds = bf->bf_desc;
	ath_hal_setuptxdesc(sc->sc_ah, ds
			    , adf_nbuf_len(skb) + IEEE80211_CRC_LEN	/* pkt len */
			    , 0				/* header length */
			    , HAL_PKT_TYPE_NORMAL	/* Atheros packet type */
			    , 60			/* txpower */
			    , min_rate			/* series 0 rate */
			    , ATH_TXMAXTRY		/* series 0 tries */
			    , HAL_TXKEYIX_INVALID	/* key cache index */
			    , 0				/* antenna mode */
			    , HAL_TXDESC_INTREQ
			    | HAL_TXDESC_CLRDMASK
			    , 0, 0			/* rts/cts rate/duration */
			    , 0, 0
			    , ATH_COMP_PROC_NO_COMP_NO_CCS);

	skbhead = bf->bf_skbhead;

	for (ds0 = ds, i = 0; i < bf->bf_dmamap_info.nsegs; ds0++, i++) {
		ath_hal_clr11n_aggr(sc->sc_ah, ds0);
	}

	ath_filltxdesc(sc, bf);

	for (i = 0; i < 4; i++) {
		series[i].Tries = ATH_TXMAXTRY;
		series[i].Rate = min_rate;
		series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
	}

	ath_hal_set11n_ratescenario(sc->sc_ah, bf->bf_desc, 0, 0, 0, series, 4, 4);
	ath_tgt_txq_add_ucast(sc, bf);
}