2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 #include <asm/unaligned.h>
19 #include <linux/pci.h>
25 #define ATH9K_CLOCK_RATE_CCK 22
26 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
27 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
29 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
30 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
31 enum ath9k_ht_macmode macmode);
32 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
33 struct ar5416_eeprom_def *pEepData,
35 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
36 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
38 /********************/
39 /* Helper Functions */
40 /********************/
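/*
 * MAC clock <-> microsecond conversion helpers. The MAC clock rate
 * depends on the band: 22 MHz for CCK, 44 MHz for 2 GHz OFDM and
 * 40 MHz for 5 GHz OFDM (see the ATH9K_CLOCK_RATE_* defines above),
 * and it doubles again on HT40 channels.
 */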
42 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
44 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
46 if (!ah->curchan) /* should really check for CCK instead */
47 return clks / ATH9K_CLOCK_RATE_CCK;
48 if (conf->channel->band == IEEE80211_BAND_2GHZ)
49 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
51 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
54 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
56 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
58 if (conf_is_ht40(conf))
59 return ath9k_hw_mac_usec(ah, clks) / 2;
61 return ath9k_hw_mac_usec(ah, clks);
64 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
66 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
68 if (!ah->curchan) /* should really check for CCK instead */
69 return usecs * ATH9K_CLOCK_RATE_CCK;
70 if (conf->channel->band == IEEE80211_BAND_2GHZ)
71 return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
72 return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
75 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
77 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
79 if (conf_is_ht40(conf))
80 return ath9k_hw_mac_clks(ah, usecs) * 2;
82 return ath9k_hw_mac_clks(ah, usecs);
86 * Read and write: they both share the same lock. We do this to serialize
87 * reads and writes on Atheros 802.11n PCI devices only. This is required
88 * as the FIFO on these devices can sanely accept only 2 requests; after
89 * that the device goes bananas. Serializing the reads/writes prevents this
93 void ath9k_iowrite32(struct ath_hw *ah, u32 reg_offset, u32 val)
95 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
97 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
98 iowrite32(val, ah->ah_sc->mem + reg_offset);
99 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
101 iowrite32(val, ah->ah_sc->mem + reg_offset);
104 unsigned int ath9k_ioread32(struct ath_hw *ah, u32 reg_offset)
107 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
109 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
110 val = ioread32(ah->ah_sc->mem + reg_offset);
111 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
113 val = ioread32(ah->ah_sc->mem + reg_offset);
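/*
 * Poll a register until (value & mask) == val, sampling every
 * AH_TIME_QUANTUM microseconds until the timeout (in usecs) expires.
 * Returns true on success, false (with a debug message) on timeout.
 */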
117 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
121 BUG_ON(timeout < AH_TIME_QUANTUM);
123 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
124 if ((REG_READ(ah, reg) & mask) == val)
127 udelay(AH_TIME_QUANTUM);
130 DPRINTF(ah, ATH_DBG_ANY,
131 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
132 timeout, reg, REG_READ(ah, reg), mask, val);
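/* Reverse the order of the lowest n bits of val. */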
137 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
142 for (i = 0, retval = 0; i < n; i++) {
143 retval = (retval << 1) | (val & 1);
149 bool ath9k_get_channel_edges(struct ath_hw *ah,
153 struct ath9k_hw_capabilities *pCap = &ah->caps;
155 if (flags & CHANNEL_5GHZ) {
156 *low = pCap->low_5ghz_chan;
157 *high = pCap->high_5ghz_chan;
160 if ((flags & CHANNEL_2GHZ)) {
161 *low = pCap->low_2ghz_chan;
162 *high = pCap->high_2ghz_chan;
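/*
 * Compute the transmit duration (in microseconds) of a frame of
 * frameLen bytes at the given rate index: SIFS + preamble/PLCP +
 * payload time for CCK, or SIFS + preamble + per-symbol time for
 * OFDM, with half- and quarter-rate channel variants.
 */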
168 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
169 const struct ath_rate_table *rates,
170 u32 frameLen, u16 rateix,
173 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
176 kbps = rates->info[rateix].ratekbps;
181 switch (rates->info[rateix].phy) {
182 case WLAN_RC_PHY_CCK:
183 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
184 if (shortPreamble && rates->info[rateix].short_preamble)
186 numBits = frameLen << 3;
187 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
189 case WLAN_RC_PHY_OFDM:
190 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
191 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
192 numBits = OFDM_PLCP_BITS + (frameLen << 3);
193 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
194 txTime = OFDM_SIFS_TIME_QUARTER
195 + OFDM_PREAMBLE_TIME_QUARTER
196 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
197 } else if (ah->curchan &&
198 IS_CHAN_HALF_RATE(ah->curchan)) {
199 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
200 numBits = OFDM_PLCP_BITS + (frameLen << 3);
201 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
202 txTime = OFDM_SIFS_TIME_HALF +
203 OFDM_PREAMBLE_TIME_HALF
204 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
206 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
207 numBits = OFDM_PLCP_BITS + (frameLen << 3);
208 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
209 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
210 + (numSymbols * OFDM_SYMBOL_TIME);
214 DPRINTF(ah, ATH_DBG_FATAL,
215 "Unknown phy %u (rate ix %u)\n",
216 rates->info[rateix].phy, rateix);
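/*
 * Fill in the synthesizer, control and extension channel centers for
 * a channel. For HT40 the synthesizer is offset by
 * HT40_CHANNEL_CENTER_SHIFT above (HT40+) or below (HT40-) the
 * control channel.
 */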
224 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
225 struct ath9k_channel *chan,
226 struct chan_centers *centers)
230 if (!IS_CHAN_HT40(chan)) {
231 centers->ctl_center = centers->ext_center =
232 centers->synth_center = chan->channel;
236 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
237 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
238 centers->synth_center =
239 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
242 centers->synth_center =
243 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
247 centers->ctl_center =
248 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
249 centers->ext_center =
250 centers->synth_center + (extoff *
251 ((ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_20) ?
252 HT40_CHANNEL_CENTER_SHIFT : 15));
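/* Decode the MAC version/revision and PCI-Express capability from AR_SREV. */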
259 static void ath9k_hw_read_revisions(struct ath_hw *ah)
263 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
266 val = REG_READ(ah, AR_SREV);
267 ah->hw_version.macVersion =
268 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
269 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
270 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
272 if (!AR_SREV_9100(ah))
273 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
275 ah->hw_version.macRev = val & AR_SREV_REVISION;
277 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
278 ah->is_pciexpress = true;
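/* Read back the analog (radio) chip revision through the PHY register interface. */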
282 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
287 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
289 for (i = 0; i < 8; i++)
290 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
291 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
292 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
294 return ath9k_hw_reverse_bits(val, 8);
297 /************************************/
298 /* HW Attach, Detach, Init Routines */
299 /************************************/
301 static void ath9k_hw_disablepcie(struct ath_hw *ah)
303 if (AR_SREV_9100(ah))
306 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
307 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
308 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
309 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
310 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
311 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
312 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
313 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
314 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
316 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
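/*
 * Basic register access self-test: write walking and fixed patterns
 * to two scratch-capable registers, read them back, and restore the
 * original register contents afterwards.
 */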
319 static bool ath9k_hw_chip_test(struct ath_hw *ah)
321 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
323 u32 patternData[4] = { 0x55555555,
329 for (i = 0; i < 2; i++) {
330 u32 addr = regAddr[i];
333 regHold[i] = REG_READ(ah, addr);
334 for (j = 0; j < 0x100; j++) {
335 wrData = (j << 16) | j;
336 REG_WRITE(ah, addr, wrData);
337 rdData = REG_READ(ah, addr);
338 if (rdData != wrData) {
339 DPRINTF(ah, ATH_DBG_FATAL,
340 "address test failed "
341 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
342 addr, wrData, rdData);
346 for (j = 0; j < 4; j++) {
347 wrData = patternData[j];
348 REG_WRITE(ah, addr, wrData);
349 rdData = REG_READ(ah, addr);
350 if (wrData != rdData) {
351 DPRINTF(ah, ATH_DBG_FATAL,
352 "address test failed "
353 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
354 addr, wrData, rdData);
358 REG_WRITE(ah, regAddr[i], regHold[i]);
365 static const char *ath9k_hw_devname(u16 devid)
368 case AR5416_DEVID_PCI:
369 return "Atheros 5416";
370 case AR5416_DEVID_PCIE:
371 return "Atheros 5418";
372 case AR9160_DEVID_PCI:
373 return "Atheros 9160";
374 case AR5416_AR9100_DEVID:
375 return "Atheros 9100";
376 case AR9280_DEVID_PCI:
377 case AR9280_DEVID_PCIE:
378 return "Atheros 9280";
379 case AR9285_DEVID_PCIE:
380 return "Atheros 9285";
381 case AR5416_DEVID_AR9287_PCI:
382 case AR5416_DEVID_AR9287_PCIE:
383 return "Atheros 9287";
389 static void ath9k_hw_init_config(struct ath_hw *ah)
393 ah->config.dma_beacon_response_time = 2;
394 ah->config.sw_beacon_response_time = 10;
395 ah->config.additional_swba_backoff = 0;
396 ah->config.ack_6mb = 0x0;
397 ah->config.cwm_ignore_extcca = 0;
398 ah->config.pcie_powersave_enable = 0;
399 ah->config.pcie_clock_req = 0;
400 ah->config.pcie_waen = 0;
401 ah->config.analog_shiftreg = 1;
402 ah->config.ht_enable = 1;
403 ah->config.ofdm_trig_low = 200;
404 ah->config.ofdm_trig_high = 500;
405 ah->config.cck_trig_high = 200;
406 ah->config.cck_trig_low = 100;
407 ah->config.enable_ani = 1;
408 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
409 ah->config.antenna_switch_swap = 0;
411 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
412 ah->config.spurchans[i][0] = AR_NO_SPUR;
413 ah->config.spurchans[i][1] = AR_NO_SPUR;
416 ah->config.intr_mitigation = true;
419 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
420 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
421 * This means we use it for all AR5416 devices, and the few
422 * minor PCI AR9280 devices out there.
424 * Serialization is required because these devices do not handle
425 * the case of two concurrent reads/writes well, due to the latency
426 * involved. During one read/write another read/write can be issued
427 * on another CPU while the previous read/write may still be working
428 * on our hardware; if we hit this case the hardware poops in a loop.
429 * We prevent this by serializing reads and writes.
431 * This issue is not present on PCI-Express devices or pre-AR5416
432 * devices (legacy, 802.11abg).
434 if (num_possible_cpus() > 1)
435 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
438 static void ath9k_hw_init_defaults(struct ath_hw *ah)
440 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
442 regulatory->country_code = CTRY_DEFAULT;
443 regulatory->power_limit = MAX_RATE_POWER;
444 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
446 ah->hw_version.magic = AR5416_MAGIC;
447 ah->hw_version.subvendorid = 0;
450 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
451 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
452 if (!AR_SREV_9100(ah))
453 ah->ah_flags = AH_USE_EEPROM;
456 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
457 ah->beacon_interval = 100;
458 ah->enable_32kHz_clock = DONT_USE_32KHZ;
459 ah->slottime = (u32) -1;
460 ah->acktimeout = (u32) -1;
461 ah->ctstimeout = (u32) -1;
462 ah->globaltxtimeout = (u32) -1;
464 ah->gbeacon_rate = 0;
466 ah->power_mode = ATH9K_PM_UNDEFINED;
469 static int ath9k_hw_rfattach(struct ath_hw *ah)
471 bool rfStatus = false;
474 rfStatus = ath9k_hw_init_rf(ah, &ecode);
476 DPRINTF(ah, ATH_DBG_FATAL,
477 "RF setup failed, status: %u\n", ecode);
484 static int ath9k_hw_rf_claim(struct ath_hw *ah)
488 REG_WRITE(ah, AR_PHY(0), 0x00000007);
490 val = ath9k_hw_get_radiorev(ah);
491 switch (val & AR_RADIO_SREV_MAJOR) {
493 val = AR_RAD5133_SREV_MAJOR;
495 case AR_RAD5133_SREV_MAJOR:
496 case AR_RAD5122_SREV_MAJOR:
497 case AR_RAD2133_SREV_MAJOR:
498 case AR_RAD2122_SREV_MAJOR:
501 DPRINTF(ah, ATH_DBG_FATAL,
502 "Radio Chip Rev 0x%02X not supported\n",
503 val & AR_RADIO_SREV_MAJOR);
507 ah->hw_version.analog5GhzRev = val;
512 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
519 for (i = 0; i < 3; i++) {
520 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
522 ah->macaddr[2 * i] = eeval >> 8;
523 ah->macaddr[2 * i + 1] = eeval & 0xff;
525 if (sum == 0 || sum == 0xffff * 3)
526 return -EADDRNOTAVAIL;
531 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
535 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
536 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
538 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
539 INIT_INI_ARRAY(&ah->iniModesRxGain,
540 ar9280Modes_backoff_13db_rxgain_9280_2,
541 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
542 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
543 INIT_INI_ARRAY(&ah->iniModesRxGain,
544 ar9280Modes_backoff_23db_rxgain_9280_2,
545 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
547 INIT_INI_ARRAY(&ah->iniModesRxGain,
548 ar9280Modes_original_rxgain_9280_2,
549 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
551 INIT_INI_ARRAY(&ah->iniModesRxGain,
552 ar9280Modes_original_rxgain_9280_2,
553 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
557 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
561 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
562 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
564 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
565 INIT_INI_ARRAY(&ah->iniModesTxGain,
566 ar9280Modes_high_power_tx_gain_9280_2,
567 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
569 INIT_INI_ARRAY(&ah->iniModesTxGain,
570 ar9280Modes_original_tx_gain_9280_2,
571 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
573 INIT_INI_ARRAY(&ah->iniModesTxGain,
574 ar9280Modes_original_tx_gain_9280_2,
575 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
579 static int ath9k_hw_post_init(struct ath_hw *ah)
583 if (!ath9k_hw_chip_test(ah))
586 ecode = ath9k_hw_rf_claim(ah);
590 ecode = ath9k_hw_eeprom_init(ah);
594 DPRINTF(ah, ATH_DBG_CONFIG, "Eeprom VER: %d, REV: %d\n",
595 ah->eep_ops->get_eeprom_ver(ah), ah->eep_ops->get_eeprom_rev(ah));
597 ecode = ath9k_hw_rfattach(ah);
601 if (!AR_SREV_9100(ah)) {
602 ath9k_hw_ani_setup(ah);
603 ath9k_hw_ani_init(ah);
609 static bool ath9k_hw_devid_supported(u16 devid)
612 case AR5416_DEVID_PCI:
613 case AR5416_DEVID_PCIE:
614 case AR5416_AR9100_DEVID:
615 case AR9160_DEVID_PCI:
616 case AR9280_DEVID_PCI:
617 case AR9280_DEVID_PCIE:
618 case AR9285_DEVID_PCIE:
619 case AR5416_DEVID_AR9287_PCI:
620 case AR5416_DEVID_AR9287_PCIE:
628 static bool ath9k_hw_macversion_supported(u32 macversion)
630 switch (macversion) {
631 case AR_SREV_VERSION_5416_PCI:
632 case AR_SREV_VERSION_5416_PCIE:
633 case AR_SREV_VERSION_9160:
634 case AR_SREV_VERSION_9100:
635 case AR_SREV_VERSION_9280:
636 case AR_SREV_VERSION_9285:
637 case AR_SREV_VERSION_9287:
640 case AR_SREV_VERSION_9271:
647 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
649 if (AR_SREV_9160_10_OR_LATER(ah)) {
650 if (AR_SREV_9280_10_OR_LATER(ah)) {
651 ah->iq_caldata.calData = &iq_cal_single_sample;
652 ah->adcgain_caldata.calData =
653 &adc_gain_cal_single_sample;
654 ah->adcdc_caldata.calData =
655 &adc_dc_cal_single_sample;
656 ah->adcdc_calinitdata.calData =
659 ah->iq_caldata.calData = &iq_cal_multi_sample;
660 ah->adcgain_caldata.calData =
661 &adc_gain_cal_multi_sample;
662 ah->adcdc_caldata.calData =
663 &adc_dc_cal_multi_sample;
664 ah->adcdc_calinitdata.calData =
667 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
671 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
673 if (AR_SREV_9271(ah)) {
674 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
675 ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
676 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
677 ARRAY_SIZE(ar9271Common_9271_1_0), 2);
681 if (AR_SREV_9287_11_OR_LATER(ah)) {
682 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
683 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
684 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
685 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
686 if (ah->config.pcie_clock_req)
687 INIT_INI_ARRAY(&ah->iniPcieSerdes,
688 ar9287PciePhy_clkreq_off_L1_9287_1_1,
689 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
691 INIT_INI_ARRAY(&ah->iniPcieSerdes,
692 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
693 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
695 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
696 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
697 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
698 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
699 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
701 if (ah->config.pcie_clock_req)
702 INIT_INI_ARRAY(&ah->iniPcieSerdes,
703 ar9287PciePhy_clkreq_off_L1_9287_1_0,
704 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
706 INIT_INI_ARRAY(&ah->iniPcieSerdes,
707 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
708 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
710 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
713 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
714 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
715 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
716 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
718 if (ah->config.pcie_clock_req) {
719 INIT_INI_ARRAY(&ah->iniPcieSerdes,
720 ar9285PciePhy_clkreq_off_L1_9285_1_2,
721 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
723 INIT_INI_ARRAY(&ah->iniPcieSerdes,
724 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
725 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
728 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
729 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
730 ARRAY_SIZE(ar9285Modes_9285), 6);
731 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
732 ARRAY_SIZE(ar9285Common_9285), 2);
734 if (ah->config.pcie_clock_req) {
735 INIT_INI_ARRAY(&ah->iniPcieSerdes,
736 ar9285PciePhy_clkreq_off_L1_9285,
737 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
739 INIT_INI_ARRAY(&ah->iniPcieSerdes,
740 ar9285PciePhy_clkreq_always_on_L1_9285,
741 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
743 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
744 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
745 ARRAY_SIZE(ar9280Modes_9280_2), 6);
746 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
747 ARRAY_SIZE(ar9280Common_9280_2), 2);
749 if (ah->config.pcie_clock_req) {
750 INIT_INI_ARRAY(&ah->iniPcieSerdes,
751 ar9280PciePhy_clkreq_off_L1_9280,
752 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
754 INIT_INI_ARRAY(&ah->iniPcieSerdes,
755 ar9280PciePhy_clkreq_always_on_L1_9280,
756 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
758 INIT_INI_ARRAY(&ah->iniModesAdditional,
759 ar9280Modes_fast_clock_9280_2,
760 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
761 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
762 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
763 ARRAY_SIZE(ar9280Modes_9280), 6);
764 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
765 ARRAY_SIZE(ar9280Common_9280), 2);
766 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
767 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
768 ARRAY_SIZE(ar5416Modes_9160), 6);
769 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
770 ARRAY_SIZE(ar5416Common_9160), 2);
771 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
772 ARRAY_SIZE(ar5416Bank0_9160), 2);
773 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
774 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
775 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
776 ARRAY_SIZE(ar5416Bank1_9160), 2);
777 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
778 ARRAY_SIZE(ar5416Bank2_9160), 2);
779 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
780 ARRAY_SIZE(ar5416Bank3_9160), 3);
781 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
782 ARRAY_SIZE(ar5416Bank6_9160), 3);
783 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
784 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
785 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
786 ARRAY_SIZE(ar5416Bank7_9160), 2);
787 if (AR_SREV_9160_11(ah)) {
788 INIT_INI_ARRAY(&ah->iniAddac,
790 ARRAY_SIZE(ar5416Addac_91601_1), 2);
792 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
793 ARRAY_SIZE(ar5416Addac_9160), 2);
795 } else if (AR_SREV_9100_OR_LATER(ah)) {
796 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
797 ARRAY_SIZE(ar5416Modes_9100), 6);
798 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
799 ARRAY_SIZE(ar5416Common_9100), 2);
800 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
801 ARRAY_SIZE(ar5416Bank0_9100), 2);
802 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
803 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
804 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
805 ARRAY_SIZE(ar5416Bank1_9100), 2);
806 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
807 ARRAY_SIZE(ar5416Bank2_9100), 2);
808 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
809 ARRAY_SIZE(ar5416Bank3_9100), 3);
810 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
811 ARRAY_SIZE(ar5416Bank6_9100), 3);
812 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
813 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
814 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
815 ARRAY_SIZE(ar5416Bank7_9100), 2);
816 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
817 ARRAY_SIZE(ar5416Addac_9100), 2);
819 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
820 ARRAY_SIZE(ar5416Modes), 6);
821 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
822 ARRAY_SIZE(ar5416Common), 2);
823 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
824 ARRAY_SIZE(ar5416Bank0), 2);
825 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
826 ARRAY_SIZE(ar5416BB_RfGain), 3);
827 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
828 ARRAY_SIZE(ar5416Bank1), 2);
829 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
830 ARRAY_SIZE(ar5416Bank2), 2);
831 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
832 ARRAY_SIZE(ar5416Bank3), 3);
833 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
834 ARRAY_SIZE(ar5416Bank6), 3);
835 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
836 ARRAY_SIZE(ar5416Bank6TPC), 3);
837 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
838 ARRAY_SIZE(ar5416Bank7), 2);
839 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
840 ARRAY_SIZE(ar5416Addac), 2);
844 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
846 if (AR_SREV_9287_11_OR_LATER(ah))
847 INIT_INI_ARRAY(&ah->iniModesRxGain,
848 ar9287Modes_rx_gain_9287_1_1,
849 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
850 else if (AR_SREV_9287_10(ah))
851 INIT_INI_ARRAY(&ah->iniModesRxGain,
852 ar9287Modes_rx_gain_9287_1_0,
853 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
854 else if (AR_SREV_9280_20(ah))
855 ath9k_hw_init_rxgain_ini(ah);
857 if (AR_SREV_9287_11_OR_LATER(ah)) {
858 INIT_INI_ARRAY(&ah->iniModesTxGain,
859 ar9287Modes_tx_gain_9287_1_1,
860 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
861 } else if (AR_SREV_9287_10(ah)) {
862 INIT_INI_ARRAY(&ah->iniModesTxGain,
863 ar9287Modes_tx_gain_9287_1_0,
864 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
865 } else if (AR_SREV_9280_20(ah)) {
866 ath9k_hw_init_txgain_ini(ah);
867 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
868 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
871 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
872 INIT_INI_ARRAY(&ah->iniModesTxGain,
873 ar9285Modes_high_power_tx_gain_9285_1_2,
874 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
876 INIT_INI_ARRAY(&ah->iniModesTxGain,
877 ar9285Modes_original_tx_gain_9285_1_2,
878 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
884 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
888 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
889 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
892 for (i = 0; i < ah->iniModes.ia_rows; i++) {
893 u32 reg = INI_RA(&ah->iniModes, i, 0);
895 for (j = 1; j < ah->iniModes.ia_columns; j++) {
896 u32 val = INI_RA(&ah->iniModes, i, j);
898 INI_RA(&ah->iniModes, i, j) =
899 ath9k_hw_ini_fixup(ah,
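/*
 * One-time hardware attach: check the device and MAC IDs, reset and
 * wake the chip, choose the register serialization mode, load the
 * INI and gain tables, fill in capabilities and read the MAC address
 * from the EEPROM.
 */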
907 int ath9k_hw_init(struct ath_hw *ah)
911 if (!ath9k_hw_devid_supported(ah->hw_version.devid))
914 ath9k_hw_init_defaults(ah);
915 ath9k_hw_init_config(ah);
917 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
918 DPRINTF(ah, ATH_DBG_FATAL, "Couldn't reset chip\n");
922 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
923 DPRINTF(ah, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
927 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
928 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
929 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
930 ah->config.serialize_regmode =
933 ah->config.serialize_regmode =
938 DPRINTF(ah, ATH_DBG_RESET, "serialize_regmode is %d\n",
939 ah->config.serialize_regmode);
941 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
942 DPRINTF(ah, ATH_DBG_FATAL,
943 "Mac Chip Rev 0x%02x.%x is not supported by "
944 "this driver\n", ah->hw_version.macVersion,
945 ah->hw_version.macRev);
949 if (AR_SREV_9100(ah)) {
950 ah->iq_caldata.calData = &iq_cal_multi_sample;
951 ah->supp_cals = IQ_MISMATCH_CAL;
952 ah->is_pciexpress = false;
955 if (AR_SREV_9271(ah))
956 ah->is_pciexpress = false;
958 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
960 ath9k_hw_init_cal_settings(ah);
962 ah->ani_function = ATH9K_ANI_ALL;
963 if (AR_SREV_9280_10_OR_LATER(ah))
964 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
966 ath9k_hw_init_mode_regs(ah);
968 if (ah->is_pciexpress)
969 ath9k_hw_configpcipowersave(ah, 0, 0);
971 ath9k_hw_disablepcie(ah);
973 r = ath9k_hw_post_init(ah);
977 ath9k_hw_init_mode_gain_regs(ah);
978 ath9k_hw_fill_cap_info(ah);
979 ath9k_hw_init_11a_eeprom_fix(ah);
981 r = ath9k_hw_init_macaddr(ah);
983 DPRINTF(ah, ATH_DBG_FATAL,
984 "Failed to initialize MAC address\n");
988 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
989 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
991 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
993 ath9k_init_nfcal_hist_buffer(ah);
998 static void ath9k_hw_init_bb(struct ath_hw *ah,
999 struct ath9k_channel *chan)
1003 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1004 if (IS_CHAN_B(chan))
1005 synthDelay = (4 * synthDelay) / 22;
1009 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1011 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1014 static void ath9k_hw_init_qos(struct ath_hw *ah)
1016 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1017 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1019 REG_WRITE(ah, AR_QOS_NO_ACK,
1020 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1021 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1022 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1024 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1025 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1026 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1027 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1028 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
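/*
 * Program AR_RTC_PLL_CONTROL for the target channel: pick the
 * reference divider, half/quarter-rate clock select and PLL divider
 * for the chip family and band, wait RTC_PLL_SETTLE_DELAY, then
 * force the derived sleep clock.
 */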
1031 static void ath9k_hw_init_pll(struct ath_hw *ah,
1032 struct ath9k_channel *chan)
1036 if (AR_SREV_9100(ah)) {
1037 if (chan && IS_CHAN_5GHZ(chan))
1042 if (AR_SREV_9280_10_OR_LATER(ah)) {
1043 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1045 if (chan && IS_CHAN_HALF_RATE(chan))
1046 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1047 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1048 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1050 if (chan && IS_CHAN_5GHZ(chan)) {
1051 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1054 if (AR_SREV_9280_20(ah)) {
1055 if (((chan->channel % 20) == 0)
1056 || ((chan->channel % 10) == 0))
1062 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1065 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1067 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1069 if (chan && IS_CHAN_HALF_RATE(chan))
1070 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1071 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1072 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1074 if (chan && IS_CHAN_5GHZ(chan))
1075 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1077 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1079 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1081 if (chan && IS_CHAN_HALF_RATE(chan))
1082 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1083 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1084 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1086 if (chan && IS_CHAN_5GHZ(chan))
1087 pll |= SM(0xa, AR_RTC_PLL_DIV);
1089 pll |= SM(0xb, AR_RTC_PLL_DIV);
1092 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1094 udelay(RTC_PLL_SETTLE_DELAY);
1096 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
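/*
 * Program the RX and TX chain masks. Some chain combinations require
 * the alternate-chain analog swap workaround, and older (<= AR9160)
 * parts calibrate with all chains enabled.
 */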
1099 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1101 int rx_chainmask, tx_chainmask;
1103 rx_chainmask = ah->rxchainmask;
1104 tx_chainmask = ah->txchainmask;
1106 switch (rx_chainmask) {
1108 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1109 AR_PHY_SWAP_ALT_CHAIN);
1111 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1112 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1113 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1119 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1120 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1126 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1127 if (tx_chainmask == 0x5) {
1128 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1129 AR_PHY_SWAP_ALT_CHAIN);
1131 if (AR_SREV_9100(ah))
1132 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1133 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1136 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1137 enum nl80211_iftype opmode)
1139 ah->mask_reg = AR_IMR_TXERR |
1145 if (ah->config.intr_mitigation)
1146 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1148 ah->mask_reg |= AR_IMR_RXOK;
1150 ah->mask_reg |= AR_IMR_TXOK;
1152 if (opmode == NL80211_IFTYPE_AP)
1153 ah->mask_reg |= AR_IMR_MIB;
1155 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1156 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1158 if (!AR_SREV_9100(ah)) {
1159 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1160 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1161 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1165 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1167 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1168 DPRINTF(ah, ATH_DBG_RESET, "bad ack timeout %u\n", us);
1169 ah->acktimeout = (u32) -1;
1172 REG_RMW_FIELD(ah, AR_TIME_OUT,
1173 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1174 ah->acktimeout = us;
1179 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1181 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1182 DPRINTF(ah, ATH_DBG_RESET, "bad cts timeout %u\n", us);
1183 ah->ctstimeout = (u32) -1;
1186 REG_RMW_FIELD(ah, AR_TIME_OUT,
1187 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1188 ah->ctstimeout = us;
1193 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1196 DPRINTF(ah, ATH_DBG_XMIT,
1197 "bad global tx timeout %u\n", tu);
1198 ah->globaltxtimeout = (u32) -1;
1201 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1202 ah->globaltxtimeout = tu;
1207 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1209 DPRINTF(ah, ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1212 if (ah->misc_mode != 0)
1213 REG_WRITE(ah, AR_PCU_MISC,
1214 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1215 if (ah->slottime != (u32) -1)
1216 ath9k_hw_setslottime(ah, ah->slottime);
1217 if (ah->acktimeout != (u32) -1)
1218 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1219 if (ah->ctstimeout != (u32) -1)
1220 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1221 if (ah->globaltxtimeout != (u32) -1)
1222 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1225 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1227 return vendorid == ATHEROS_VENDOR_ID ?
1228 ath9k_hw_devname(devid) : NULL;
1231 void ath9k_hw_detach(struct ath_hw *ah)
1233 if (!AR_SREV_9100(ah))
1234 ath9k_hw_ani_disable(ah);
1236 ath9k_hw_rf_free(ah);
1237 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1246 static void ath9k_hw_override_ini(struct ath_hw *ah,
1247 struct ath9k_channel *chan)
1251 if (AR_SREV_9271(ah)) {
1253 * Enable spectral scan as a solution for issues with stuck
1254 * beacons on AR9271 1.0. The beacon stuck issue is not seen on
1257 if (AR_SREV_9271_10(ah)) {
1258 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1259 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1261 else if (AR_SREV_9271_11(ah))
1263 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1264 * present on AR9271 1.1
1266 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1271 * Set the RX_ABORT and RX_DIS bits and clear them only after
1272 * RXE is set for the MAC. This prevents frames with corrupted
1273 * descriptor status.
1275 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1277 if (AR_SREV_9280_10_OR_LATER(ah)) {
1278 val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1279 (~AR_PCU_MISC_MODE2_HWWAR1);
1281 if (AR_SREV_9287_10_OR_LATER(ah))
1282 val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1284 REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1287 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1288 AR_SREV_9280_10_OR_LATER(ah))
1291 * Disable BB clock gating
1292 * Necessary to avoid issues on AR5416 2.0
1294 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
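/*
 * INI fixup for AR9280 PCI cards: for register 0x7894 (AN_TOP2),
 * take the power-down clock indication bit from the EEPROM when the
 * EEPROM minor version is recent enough.
 */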
1297 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1298 struct ar5416_eeprom_def *pEepData,
1301 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1303 switch (ah->hw_version.devid) {
1304 case AR9280_DEVID_PCI:
1305 if (reg == 0x7894) {
1306 DPRINTF(ah, ATH_DBG_EEPROM,
1307 "ini VAL: %x EEPROM: %x\n", value,
1308 (pBase->version & 0xff));
1310 if ((pBase->version & 0xff) > 0x0a) {
1311 DPRINTF(ah, ATH_DBG_EEPROM,
1314 value &= ~AR_AN_TOP2_PWDCLKIND;
1315 value |= AR_AN_TOP2_PWDCLKIND &
1316 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1318 DPRINTF(ah, ATH_DBG_EEPROM,
1319 "PWDCLKIND Earlier Rev\n");
1322 DPRINTF(ah, ATH_DBG_EEPROM,
1323 "final ini VAL: %x\n", value);
1331 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1332 struct ar5416_eeprom_def *pEepData,
1335 if (ah->eep_map == EEP_MAP_4KBITS)
1338 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1341 static void ath9k_olc_init(struct ath_hw *ah)
1345 if (OLC_FOR_AR9287_10_LATER) {
1346 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1347 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1348 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1349 AR9287_AN_TXPC0_TXPCMODE,
1350 AR9287_AN_TXPC0_TXPCMODE_S,
1351 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1354 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1355 ah->originalGain[i] =
1356 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1362 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1363 struct ath9k_channel *chan)
1365 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1367 if (IS_CHAN_B(chan))
1369 else if (IS_CHAN_G(chan))
1377 static int ath9k_hw_process_ini(struct ath_hw *ah,
1378 struct ath9k_channel *chan,
1379 enum ath9k_ht_macmode macmode)
1381 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1382 int i, regWrites = 0;
1383 struct ieee80211_channel *channel = chan->chan;
1384 u32 modesIndex, freqIndex;
1386 switch (chan->chanmode) {
1388 case CHANNEL_A_HT20:
1392 case CHANNEL_A_HT40PLUS:
1393 case CHANNEL_A_HT40MINUS:
1398 case CHANNEL_G_HT20:
1403 case CHANNEL_G_HT40PLUS:
1404 case CHANNEL_G_HT40MINUS:
1413 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1414 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1415 ah->eep_ops->set_addac(ah, chan);
1417 if (AR_SREV_5416_22_OR_LATER(ah)) {
1418 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1420 struct ar5416IniArray temp;
1422 sizeof(u32) * ah->iniAddac.ia_rows *
1423 ah->iniAddac.ia_columns;
1425 memcpy(ah->addac5416_21,
1426 ah->iniAddac.ia_array, addacSize);
1428 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1430 temp.ia_array = ah->addac5416_21;
1431 temp.ia_columns = ah->iniAddac.ia_columns;
1432 temp.ia_rows = ah->iniAddac.ia_rows;
1433 REG_WRITE_ARRAY(&temp, 1, regWrites);
1436 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1438 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1439 u32 reg = INI_RA(&ah->iniModes, i, 0);
1440 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1442 REG_WRITE(ah, reg, val);
1444 if (reg >= 0x7800 && reg < 0x78a0
1445 && ah->config.analog_shiftreg) {
1449 DO_DELAY(regWrites);
1452 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1453 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1455 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1456 AR_SREV_9287_10_OR_LATER(ah))
1457 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1459 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1460 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1461 u32 val = INI_RA(&ah->iniCommon, i, 1);
1463 REG_WRITE(ah, reg, val);
1465 if (reg >= 0x7800 && reg < 0x78a0
1466 && ah->config.analog_shiftreg) {
1470 DO_DELAY(regWrites);
1473 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1475 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1476 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1480 ath9k_hw_override_ini(ah, chan);
1481 ath9k_hw_set_regs(ah, chan, macmode);
1482 ath9k_hw_init_chain_masks(ah);
1484 if (OLC_FOR_AR9280_20_LATER)
1487 ah->eep_ops->set_txpower(ah, chan,
1488 ath9k_regd_get_ctl(regulatory, chan),
1489 channel->max_antenna_gain * 2,
1490 channel->max_power * 2,
1491 min((u32) MAX_RATE_POWER,
1492 (u32) regulatory->power_limit));
1494 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1495 DPRINTF(ah, ATH_DBG_FATAL,
1496 "ar5416SetRfRegs failed\n");
1503 /****************************************/
1504 /* Reset and Channel Switching Routines */
1505 /****************************************/
1507 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1514 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1515 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1517 if (!AR_SREV_9280_10_OR_LATER(ah))
1518 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1519 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1521 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1522 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1524 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1527 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1529 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1532 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1537 * set AHB_MODE not to do cacheline prefetches
1539 regval = REG_READ(ah, AR_AHB_MODE);
1540 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1543 * let mac dma reads be in 128 byte chunks
1545 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1546 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1549 * Restore TX Trigger Level to its pre-reset value.
1550 * The initial value depends on whether aggregation is enabled, and is
1551 * adjusted whenever underruns are detected.
1553 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1556 * let mac dma writes be in 128 byte chunks
1558 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1559 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1562 * Setup receive FIFO threshold to hold off TX activities
1564 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1567 * reduce the number of usable entries in PCU TXBUF to avoid
1568 * wrap around issues.
1570 if (AR_SREV_9285(ah)) {
1571 /* For AR9285 the number of FIFOs is reduced to half,
1572 * so also set the usable tx buf size to half to
1573 * avoid data/delimiter underruns
1575 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1576 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1577 } else if (!AR_SREV_9271(ah)) {
1578 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1579 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1583 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1587 val = REG_READ(ah, AR_STA_ID1);
1588 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1590 case NL80211_IFTYPE_AP:
1591 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1592 | AR_STA_ID1_KSRCH_MODE);
1593 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1595 case NL80211_IFTYPE_ADHOC:
1596 case NL80211_IFTYPE_MESH_POINT:
1597 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1598 | AR_STA_ID1_KSRCH_MODE);
1599 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1601 case NL80211_IFTYPE_STATION:
1602 case NL80211_IFTYPE_MONITOR:
1603 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
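/*
 * Delta-slope programming for OFDM timing: split the scaled
 * coefficient (channel clock / synth center frequency) into a
 * mantissa/exponent pair for AR_PHY_TIMING3, plus a 9/10-scaled copy
 * for the half-GI fields in AR_PHY_HALFGI.
 */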
1608 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1613 u32 coef_exp, coef_man;
1615 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1616 if ((coef_scaled >> coef_exp) & 0x1)
1619 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1621 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1623 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1624 *coef_exponent = coef_exp - 16;
1627 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1628 struct ath9k_channel *chan)
1630 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1631 u32 clockMhzScaled = 0x64000000;
1632 struct chan_centers centers;
1634 if (IS_CHAN_HALF_RATE(chan))
1635 clockMhzScaled = clockMhzScaled >> 1;
1636 else if (IS_CHAN_QUARTER_RATE(chan))
1637 clockMhzScaled = clockMhzScaled >> 2;
1639 ath9k_hw_get_channel_centers(ah, chan, &centers);
1640 coef_scaled = clockMhzScaled / centers.synth_center;
1642 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1645 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1646 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1647 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1648 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1650 coef_scaled = (9 * coef_scaled) / 10;
1652 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1655 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1656 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1657 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1658 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
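/*
 * Warm/cold MAC reset: force the RTC awake, assert the reset lines
 * in AR_RTC_RC (plus the AHB/host-interface reset if a sync timeout
 * is pending), release them, and re-initialize the PLL.
 */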
1661 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1666 if (AR_SREV_9100(ah)) {
1667 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1668 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1669 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1670 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1671 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1674 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1675 AR_RTC_FORCE_WAKE_ON_INT);
1677 if (AR_SREV_9100(ah)) {
1678 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1679 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1681 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1683 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1684 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1685 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1686 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1688 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1691 rst_flags = AR_RTC_RC_MAC_WARM;
1692 if (type == ATH9K_RESET_COLD)
1693 rst_flags |= AR_RTC_RC_MAC_COLD;
1696 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1699 REG_WRITE(ah, AR_RTC_RC, 0);
1700 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1701 DPRINTF(ah, ATH_DBG_RESET,
1702 "RTC stuck in MAC reset\n");
1706 if (!AR_SREV_9100(ah))
1707 REG_WRITE(ah, AR_RC, 0);
1709 ath9k_hw_init_pll(ah, NULL);
1711 if (AR_SREV_9100(ah))
1717 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1719 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1720 AR_RTC_FORCE_WAKE_ON_INT);
1722 if (!AR_SREV_9100(ah))
1723 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1725 REG_WRITE(ah, AR_RTC_RESET, 0);
1728 if (!AR_SREV_9100(ah))
1729 REG_WRITE(ah, AR_RC, 0);
1731 REG_WRITE(ah, AR_RTC_RESET, 1);
1733 if (!ath9k_hw_wait(ah,
1738 DPRINTF(ah, ATH_DBG_RESET, "RTC not waking up\n");
1742 ath9k_hw_read_revisions(ah);
1744 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1747 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1749 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1750 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1753 case ATH9K_RESET_POWER_ON:
1754 return ath9k_hw_set_reset_power_on(ah);
1755 case ATH9K_RESET_WARM:
1756 case ATH9K_RESET_COLD:
1757 return ath9k_hw_set_reset(ah, type);
1763 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
1764 enum ath9k_ht_macmode macmode)
1767 u32 enableDacFifo = 0;
1769 if (AR_SREV_9285_10_OR_LATER(ah))
1770 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1771 AR_PHY_FC_ENABLE_DAC_FIFO);
1773 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1774 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1776 if (IS_CHAN_HT40(chan)) {
1777 phymode |= AR_PHY_FC_DYN2040_EN;
1779 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1780 (chan->chanmode == CHANNEL_G_HT40PLUS))
1781 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1783 if (ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_25)
1784 phymode |= AR_PHY_FC_DYN2040_EXT_CH;
1786 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1788 ath9k_hw_set11nmac2040(ah, macmode);
1790 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1791 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1794 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1795 struct ath9k_channel *chan)
1797 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1798 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1800 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1803 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1806 ah->chip_fullsleep = false;
1807 ath9k_hw_init_pll(ah, chan);
1808 ath9k_hw_set_rfmode(ah, chan);
1813 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1814 struct ath9k_channel *chan,
1815 enum ath9k_ht_macmode macmode)
1817 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1818 struct ieee80211_channel *channel = chan->chan;
1819 u32 synthDelay, qnum;
1821 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1822 if (ath9k_hw_numtxpending(ah, qnum)) {
1823 DPRINTF(ah, ATH_DBG_QUEUE,
1824 "Transmit frames pending on queue %d\n", qnum);
1829 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1830 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1831 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1832 DPRINTF(ah, ATH_DBG_FATAL,
1833 "Could not kill baseband RX\n");
1837 ath9k_hw_set_regs(ah, chan, macmode);
1839 if (AR_SREV_9280_10_OR_LATER(ah)) {
1840 ath9k_hw_ar9280_set_channel(ah, chan);
1842 if (!(ath9k_hw_set_channel(ah, chan))) {
1843 DPRINTF(ah, ATH_DBG_FATAL,
1844 "Failed to set channel\n");
1849 ah->eep_ops->set_txpower(ah, chan,
1850 ath9k_regd_get_ctl(regulatory, chan),
1851 channel->max_antenna_gain * 2,
1852 channel->max_power * 2,
1853 min((u32) MAX_RATE_POWER,
1854 (u32) regulatory->power_limit));
1856 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1857 if (IS_CHAN_B(chan))
1858 synthDelay = (4 * synthDelay) / 22;
1862 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1864 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1866 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1867 ath9k_hw_set_delta_slope(ah, chan);
1869 if (AR_SREV_9280_10_OR_LATER(ah))
1870 ath9k_hw_9280_spur_mitigate(ah, chan);
1872 ath9k_hw_spur_mitigate(ah, chan);
1874 if (!chan->oneTimeCalsDone)
1875 chan->oneTimeCalsDone = true;
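/*
 * Spur mitigation for AR9280 and later: scan the EEPROM spur
 * channels and, if one lands close enough to the operating channel,
 * program the spur RSSI/filter controls, delta phase and frequency,
 * and the pilot/channel/Viterbi bin masks so the baseband notches
 * the spur out.
 */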
1880 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1882 int bb_spur = AR_NO_SPUR;
1885 int bb_spur_off, spur_subchannel_sd;
1887 int spur_delta_phase;
1889 int upper, lower, cur_vit_mask;
1892 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1893 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1895 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1896 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1898 int inc[4] = { 0, 100, 0, 0 };
1899 struct chan_centers centers;
1906 bool is2GHz = IS_CHAN_2GHZ(chan);
1908 memset(&mask_m, 0, sizeof(int8_t) * 123);
1909 memset(&mask_p, 0, sizeof(int8_t) * 123);
1911 ath9k_hw_get_channel_centers(ah, chan, &centers);
1912 freq = centers.synth_center;
1914 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1915 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1916 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1919 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1921 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1923 if (AR_NO_SPUR == cur_bb_spur)
1925 cur_bb_spur = cur_bb_spur - freq;
1927 if (IS_CHAN_HT40(chan)) {
1928 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1929 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1930 bb_spur = cur_bb_spur;
1933 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1934 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1935 bb_spur = cur_bb_spur;
1940 if (AR_NO_SPUR == bb_spur) {
1941 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1942 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1945 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1946 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1949 bin = bb_spur * 320;
1951 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1953 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1954 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1955 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1956 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1957 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1959 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1960 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1961 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1962 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1963 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1964 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1966 if (IS_CHAN_HT40(chan)) {
1968 spur_subchannel_sd = 1;
1969 bb_spur_off = bb_spur + 10;
1971 spur_subchannel_sd = 0;
1972 bb_spur_off = bb_spur - 10;
1975 spur_subchannel_sd = 0;
1976 bb_spur_off = bb_spur;
1979 if (IS_CHAN_HT40(chan))
1981 ((bb_spur * 262144) /
1982 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1985 ((bb_spur * 524288) /
1986 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1988 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1989 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1991 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1992 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1993 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1994 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1996 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1997 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
2003 for (i = 0; i < 4; i++) {
2007 for (bp = 0; bp < 30; bp++) {
2008 if ((cur_bin > lower) && (cur_bin < upper)) {
2009 pilot_mask = pilot_mask | 0x1 << bp;
2010 chan_mask = chan_mask | 0x1 << bp;
2015 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2016 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2019 cur_vit_mask = 6100;
2023 for (i = 0; i < 123; i++) {
2024 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2026 /* workaround for gcc bug #37014 */
2027 volatile int tmp_v = abs(cur_vit_mask - bin);
2033 if (cur_vit_mask < 0)
2034 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2036 mask_p[cur_vit_mask / 100] = mask_amt;
2038 cur_vit_mask -= 100;
2041 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2042 | (mask_m[48] << 26) | (mask_m[49] << 24)
2043 | (mask_m[50] << 22) | (mask_m[51] << 20)
2044 | (mask_m[52] << 18) | (mask_m[53] << 16)
2045 | (mask_m[54] << 14) | (mask_m[55] << 12)
2046 | (mask_m[56] << 10) | (mask_m[57] << 8)
2047 | (mask_m[58] << 6) | (mask_m[59] << 4)
2048 | (mask_m[60] << 2) | (mask_m[61] << 0);
2049 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2050 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2052 tmp_mask = (mask_m[31] << 28)
2053 | (mask_m[32] << 26) | (mask_m[33] << 24)
2054 | (mask_m[34] << 22) | (mask_m[35] << 20)
2055 | (mask_m[36] << 18) | (mask_m[37] << 16)
2056 | (mask_m[38] << 14) | (mask_m[39] << 12)
2057 | (mask_m[40] << 10) | (mask_m[41] << 8)
2058 | (mask_m[42] << 6) | (mask_m[43] << 4)
2059 | (mask_m[44] << 2) | (mask_m[45] << 0);
2060 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2061 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2063 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2064 | (mask_m[18] << 26) | (mask_m[18] << 24)
2065 | (mask_m[20] << 22) | (mask_m[20] << 20)
2066 | (mask_m[22] << 18) | (mask_m[22] << 16)
2067 | (mask_m[24] << 14) | (mask_m[24] << 12)
2068 | (mask_m[25] << 10) | (mask_m[26] << 8)
2069 | (mask_m[27] << 6) | (mask_m[28] << 4)
2070 | (mask_m[29] << 2) | (mask_m[30] << 0);
2071 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2072 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2074 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2075 | (mask_m[2] << 26) | (mask_m[3] << 24)
2076 | (mask_m[4] << 22) | (mask_m[5] << 20)
2077 | (mask_m[6] << 18) | (mask_m[7] << 16)
2078 | (mask_m[8] << 14) | (mask_m[9] << 12)
2079 | (mask_m[10] << 10) | (mask_m[11] << 8)
2080 | (mask_m[12] << 6) | (mask_m[13] << 4)
2081 | (mask_m[14] << 2) | (mask_m[15] << 0);
2082 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2083 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2085 tmp_mask = (mask_p[15] << 28)
2086 | (mask_p[14] << 26) | (mask_p[13] << 24)
2087 | (mask_p[12] << 22) | (mask_p[11] << 20)
2088 | (mask_p[10] << 18) | (mask_p[9] << 16)
2089 | (mask_p[8] << 14) | (mask_p[7] << 12)
2090 | (mask_p[6] << 10) | (mask_p[5] << 8)
2091 | (mask_p[4] << 6) | (mask_p[3] << 4)
2092 | (mask_p[2] << 2) | (mask_p[1] << 0);
2093 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2094 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2096 tmp_mask = (mask_p[30] << 28)
2097 | (mask_p[29] << 26) | (mask_p[28] << 24)
2098 | (mask_p[27] << 22) | (mask_p[26] << 20)
2099 | (mask_p[25] << 18) | (mask_p[24] << 16)
2100 | (mask_p[23] << 14) | (mask_p[22] << 12)
2101 | (mask_p[21] << 10) | (mask_p[20] << 8)
2102 | (mask_p[19] << 6) | (mask_p[18] << 4)
2103 | (mask_p[17] << 2) | (mask_p[16] << 0);
2104 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2105 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2107 tmp_mask = (mask_p[45] << 28)
2108 | (mask_p[44] << 26) | (mask_p[43] << 24)
2109 | (mask_p[42] << 22) | (mask_p[41] << 20)
2110 | (mask_p[40] << 18) | (mask_p[39] << 16)
2111 | (mask_p[38] << 14) | (mask_p[37] << 12)
2112 | (mask_p[36] << 10) | (mask_p[35] << 8)
2113 | (mask_p[34] << 6) | (mask_p[33] << 4)
2114 | (mask_p[32] << 2) | (mask_p[31] << 0);
2115 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2116 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2118 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2119 | (mask_p[59] << 26) | (mask_p[58] << 24)
2120 | (mask_p[57] << 22) | (mask_p[56] << 20)
2121 | (mask_p[55] << 18) | (mask_p[54] << 16)
2122 | (mask_p[53] << 14) | (mask_p[52] << 12)
2123 | (mask_p[51] << 10) | (mask_p[50] << 8)
2124 | (mask_p[49] << 6) | (mask_p[48] << 4)
2125 | (mask_p[47] << 2) | (mask_p[46] << 0);
2126 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2127 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
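/*
 * Legacy (pre-AR9280) spur mitigation: same idea as above, without
 * the HT40 sub-channel handling and using the older delta-phase and
 * frequency scaling.
 */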
2130 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2132 int bb_spur = AR_NO_SPUR;
2135 int spur_delta_phase;
2137 int upper, lower, cur_vit_mask;
2140 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2141 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2143 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2144 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2146 int inc[4] = { 0, 100, 0, 0 };
2153 bool is2GHz = IS_CHAN_2GHZ(chan);
2155 memset(&mask_m, 0, sizeof(int8_t) * 123);
2156 memset(&mask_p, 0, sizeof(int8_t) * 123);
2158 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2159 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2160 if (AR_NO_SPUR == cur_bb_spur)
2162 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2163 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2164 bb_spur = cur_bb_spur;
2169 if (AR_NO_SPUR == bb_spur)
2174 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2175 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2176 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2177 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2178 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2180 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2182 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2183 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2184 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2185 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2186 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2187 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2189 spur_delta_phase = ((bb_spur * 524288) / 100) &
2190 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2192 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2193 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2195 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2196 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2197 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2198 REG_WRITE(ah, AR_PHY_TIMING11, new);
2204 for (i = 0; i < 4; i++) {
2208 for (bp = 0; bp < 30; bp++) {
2209 if ((cur_bin > lower) && (cur_bin < upper)) {
2210 pilot_mask = pilot_mask | 0x1 << bp;
2211 chan_mask = chan_mask | 0x1 << bp;
2216 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2217 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
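/*
 * Each of the four register pairs above covers 30 bins: bit bp is set in
 * both pilot_mask and chan_mask whenever cur_bin lies strictly between
 * lower and upper, and the two masks are then written to the matching
 * pilot and channel mask registers.
 */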
2220 cur_vit_mask = 6100;
2224 for (i = 0; i < 123; i++) {
2225 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2227 /* workaround for gcc bug #37014 */
2228 volatile int tmp_v = abs(cur_vit_mask - bin);
2234 if (cur_vit_mask < 0)
2235 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2237 mask_p[cur_vit_mask / 100] = mask_amt;
2239 cur_vit_mask -= 100;
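/*
 * cur_vit_mask starts at 6100 and is decremented by 100 on each of the
 * 123 iterations, sweeping +6100 .. -6100.  Negative values index
 * mask_m[|cur_vit_mask| / 100] and non-negative ones
 * mask_p[cur_vit_mask / 100], i.e. bins 0..61 on either side of the
 * carrier, which is exactly the range packed into the BIN_MASK/MASK2
 * registers below.
 */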
2242 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2243 | (mask_m[48] << 26) | (mask_m[49] << 24)
2244 | (mask_m[50] << 22) | (mask_m[51] << 20)
2245 | (mask_m[52] << 18) | (mask_m[53] << 16)
2246 | (mask_m[54] << 14) | (mask_m[55] << 12)
2247 | (mask_m[56] << 10) | (mask_m[57] << 8)
2248 | (mask_m[58] << 6) | (mask_m[59] << 4)
2249 | (mask_m[60] << 2) | (mask_m[61] << 0);
2250 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2251 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2253 tmp_mask = (mask_m[31] << 28)
2254 | (mask_m[32] << 26) | (mask_m[33] << 24)
2255 | (mask_m[34] << 22) | (mask_m[35] << 20)
2256 | (mask_m[36] << 18) | (mask_m[37] << 16)
2257 | (mask_m[38] << 14) | (mask_m[39] << 12)
2258 | (mask_m[40] << 10) | (mask_m[41] << 8)
2259 | (mask_m[42] << 6) | (mask_m[43] << 4)
2260 | (mask_m[44] << 2) | (mask_m[45] << 0);
2261 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2262 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2264 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2265 | (mask_m[18] << 26) | (mask_m[18] << 24)
2266 | (mask_m[20] << 22) | (mask_m[20] << 20)
2267 | (mask_m[22] << 18) | (mask_m[22] << 16)
2268 | (mask_m[24] << 14) | (mask_m[24] << 12)
2269 | (mask_m[25] << 10) | (mask_m[26] << 8)
2270 | (mask_m[27] << 6) | (mask_m[28] << 4)
2271 | (mask_m[29] << 2) | (mask_m[30] << 0);
2272 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2273 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2275 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2276 | (mask_m[2] << 26) | (mask_m[3] << 24)
2277 | (mask_m[4] << 22) | (mask_m[5] << 20)
2278 | (mask_m[6] << 18) | (mask_m[7] << 16)
2279 | (mask_m[8] << 14) | (mask_m[9] << 12)
2280 | (mask_m[10] << 10) | (mask_m[11] << 8)
2281 | (mask_m[12] << 6) | (mask_m[13] << 4)
2282 | (mask_m[14] << 2) | (mask_m[15] << 0);
2283 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2284 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2286 tmp_mask = (mask_p[15] << 28)
2287 | (mask_p[14] << 26) | (mask_p[13] << 24)
2288 | (mask_p[12] << 22) | (mask_p[11] << 20)
2289 | (mask_p[10] << 18) | (mask_p[9] << 16)
2290 | (mask_p[8] << 14) | (mask_p[7] << 12)
2291 | (mask_p[6] << 10) | (mask_p[5] << 8)
2292 | (mask_p[4] << 6) | (mask_p[3] << 4)
2293 | (mask_p[2] << 2) | (mask_p[1] << 0);
2294 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2295 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2297 tmp_mask = (mask_p[30] << 28)
2298 | (mask_p[29] << 26) | (mask_p[28] << 24)
2299 | (mask_p[27] << 22) | (mask_p[26] << 20)
2300 | (mask_p[25] << 18) | (mask_p[24] << 16)
2301 | (mask_p[23] << 14) | (mask_p[22] << 12)
2302 | (mask_p[21] << 10) | (mask_p[20] << 8)
2303 | (mask_p[19] << 6) | (mask_p[18] << 4)
2304 | (mask_p[17] << 2) | (mask_p[16] << 0);
2305 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2306 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2308 tmp_mask = (mask_p[45] << 28)
2309 | (mask_p[44] << 26) | (mask_p[43] << 24)
2310 | (mask_p[42] << 22) | (mask_p[41] << 20)
2311 | (mask_p[40] << 18) | (mask_p[39] << 16)
2312 | (mask_p[38] << 14) | (mask_p[37] << 12)
2313 | (mask_p[36] << 10) | (mask_p[35] << 8)
2314 | (mask_p[34] << 6) | (mask_p[33] << 4)
2315 | (mask_p[32] << 2) | (mask_p[31] << 0);
2316 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2317 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2319 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2320 | (mask_p[59] << 26) | (mask_p[58] << 24)
2321 | (mask_p[57] << 22) | (mask_p[56] << 20)
2322 | (mask_p[55] << 18) | (mask_p[54] << 16)
2323 | (mask_p[53] << 14) | (mask_p[52] << 12)
2324 | (mask_p[51] << 10) | (mask_p[50] << 8)
2325 | (mask_p[49] << 6) | (mask_p[48] << 4)
2326 | (mask_p[47] << 2) | (mask_p[46] << 0);
2327 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2328 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2331 static void ath9k_enable_rfkill(struct ath_hw *ah)
2333 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2334 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2336 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2337 AR_GPIO_INPUT_MUX2_RFSILENT);
2339 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2340 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
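/*
 * Once configured here, the rfkill line can be sampled elsewhere in the
 * driver (e.g. with ath9k_hw_gpio_get(ah, ah->rfkill_gpio), assuming that
 * helper is available) to detect the state of the radio-disable switch.
 */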
2343 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2344 bool bChannelChange)
2347 struct ath_softc *sc = ah->ah_sc;
2348 struct ath9k_channel *curchan = ah->curchan;
2352 int i, rx_chainmask, r;
2354 ah->extprotspacing = sc->ht_extprotspacing;
2355 ah->txchainmask = sc->tx_chainmask;
2356 ah->rxchainmask = sc->rx_chainmask;
2358 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2361 if (curchan && !ah->chip_fullsleep)
2362 ath9k_hw_getnf(ah, curchan);
2364 if (bChannelChange &&
2365 !ah->chip_fullsleep &&
2366 (ah->curchan != NULL) &&
2367 (chan->channel != ah->curchan->channel) &&
2368 ((chan->channelFlags & CHANNEL_ALL) ==
2369 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2370 !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2371 IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2373 if (ath9k_hw_channel_change(ah, chan, sc->tx_chan_width)) {
2374 ath9k_hw_loadnf(ah, ah->curchan);
2375 ath9k_hw_start_nfcal(ah);
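/*
 * The block above is the "fast channel change" path: it is taken only
 * when the chip is not in full sleep, a different channel in the same
 * band/mode (identical CHANNEL_ALL flags) is requested, and neither
 * AR9280 nor 5 MHz spaced channels are involved.  The PHY is then
 * retuned in place, the stored noise floor is reloaded and a fresh NF
 * calibration is started instead of doing a full chip reset.
 */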
2380 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2381 if (saveDefAntenna == 0)
2384 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2386 /* For chips on which RTC reset is done, save TSF before it gets cleared */
2387 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2388 tsf = ath9k_hw_gettsf64(ah);
2390 saveLedState = REG_READ(ah, AR_CFG_LED) &
2391 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2392 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2394 ath9k_hw_mark_phy_inactive(ah);
2396 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2398 AR9271_RESET_POWER_DOWN_CONTROL,
2399 AR9271_RADIO_RF_RST);
2403 if (!ath9k_hw_chip_reset(ah, chan)) {
2404 DPRINTF(ah, ATH_DBG_FATAL, "Chip reset failed\n");
2408 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2409 ah->htc_reset_init = false;
2411 AR9271_RESET_POWER_DOWN_CONTROL,
2412 AR9271_GATE_MAC_CTL);
2417 if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2418 ath9k_hw_settsf64(ah, tsf);
2420 if (AR_SREV_9280_10_OR_LATER(ah))
2421 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2423 if (AR_SREV_9287_12_OR_LATER(ah)) {
2424 /* Enable ASYNC FIFO */
2425 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2426 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2427 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2428 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2429 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2430 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2431 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2433 r = ath9k_hw_process_ini(ah, chan, sc->tx_chan_width);
2437 /* Setup MFP options for CCMP */
2438 if (AR_SREV_9280_20_OR_LATER(ah)) {
2439 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2440 * frames when constructing CCMP AAD. */
2441 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2443 ah->sw_mgmt_crypto = false;
2444 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2445 /* Disable hardware crypto for management frames */
2446 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2447 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2448 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2449 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2450 ah->sw_mgmt_crypto = true;
2452 ah->sw_mgmt_crypto = true;
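/*
 * Summary of the three cases above: AR9280 2.0 and later build the CCMP
 * AAD correctly for protected management frames, so hardware crypto is
 * used (sw_mgmt_crypto = false); AR9160 and later (pre AR9280 2.0) have
 * hardware crypto for management frames disabled and fall back to
 * software; older chips always use software crypto for management frames.
 */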
2454 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2455 ath9k_hw_set_delta_slope(ah, chan);
2457 if (AR_SREV_9280_10_OR_LATER(ah))
2458 ath9k_hw_9280_spur_mitigate(ah, chan);
2460 ath9k_hw_spur_mitigate(ah, chan);
2462 ah->eep_ops->set_board_values(ah, chan);
2464 ath9k_hw_decrease_chain_power(ah, chan);
2466 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(ah->macaddr));
2467 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(ah->macaddr + 4)
2469 | AR_STA_ID1_RTS_USE_DEF
2471 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2472 | ah->sta_id1_defaults);
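/*
 * Example (illustrative): for macaddr 00:03:7f:aa:bb:cc the writes above
 * program AR_STA_ID0 = 0xaa7f0300 (bytes 0-3, little endian) and the low
 * 16 bits of AR_STA_ID1 = 0xccbb (bytes 4-5), with the RTS/ACK policy
 * bits and ah->sta_id1_defaults OR'ed into the remaining AR_STA_ID1 bits.
 */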
2473 ath9k_hw_set_operating_mode(ah, ah->opmode);
2475 REG_WRITE(ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
2476 REG_WRITE(ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
2478 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2480 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
2481 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
2482 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2484 REG_WRITE(ah, AR_ISR, ~0);
2486 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2488 if (AR_SREV_9280_10_OR_LATER(ah))
2489 ath9k_hw_ar9280_set_channel(ah, chan);
2491 if (!(ath9k_hw_set_channel(ah, chan)))
2494 for (i = 0; i < AR_NUM_DCU; i++)
2495 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2498 for (i = 0; i < ah->caps.total_queues; i++)
2499 ath9k_hw_resettxqueue(ah, i);
2501 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2502 ath9k_hw_init_qos(ah);
2504 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2505 ath9k_enable_rfkill(ah);
2507 ath9k_hw_init_user_settings(ah);
2509 if (AR_SREV_9287_12_OR_LATER(ah)) {
2510 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2511 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2512 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2513 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2514 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2515 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2517 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2518 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2520 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2521 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2522 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2523 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2525 if (AR_SREV_9287_12_OR_LATER(ah)) {
2526 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2527 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2530 REG_WRITE(ah, AR_STA_ID1,
2531 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2533 ath9k_hw_set_dma(ah);
2535 REG_WRITE(ah, AR_OBS, 8);
2537 if (ah->config.intr_mitigation) {
2538 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2539 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2542 ath9k_hw_init_bb(ah, chan);
2544 if (!ath9k_hw_init_cal(ah, chan))
2547 rx_chainmask = ah->rxchainmask;
2548 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2549 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2550 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2553 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2556 * For big-endian systems, turn on byte swapping for descriptors
2558 if (AR_SREV_9100(ah)) {
2560 mask = REG_READ(ah, AR_CFG);
2561 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2562 DPRINTF(ah, ATH_DBG_RESET,
2563 "CFG Byte Swap Set 0x%x\n", mask);
2566 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2567 REG_WRITE(ah, AR_CFG, mask);
2568 DPRINTF(ah, ATH_DBG_RESET,
2569 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2572 /* Configure AR9271 target WLAN */
2573 if (AR_SREV_9271(ah))
2574 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2577 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2581 if (ah->btcoex_hw.enabled)
2582 ath9k_hw_btcoex_enable(ah);
2587 /************************/
2588 /* Key Cache Management */
2589 /************************/
2591 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2595 if (entry >= ah->caps.keycache_size) {
2596 DPRINTF(ah, ATH_DBG_FATAL,
2597 "keychache entry %u out of range\n", entry);
2601 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2603 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2604 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2605 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2606 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2607 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2608 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2609 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2610 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2612 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2613 u16 micentry = entry + 64;
2615 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2616 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2617 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2618 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2625 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2629 if (entry >= ah->caps.keycache_size) {
2630 DPRINTF(ah, ATH_DBG_FATAL,
2631 "keychache entry %u out of range\n", entry);
2636 macHi = (mac[5] << 8) | mac[4];
2637 macLo = (mac[3] << 24) |
2642 macLo |= (macHi & 1) << 31;
2647 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2648 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
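/*
 * AR_KEYTABLE_VALID is OR'ed into MAC1 so the entry participates in
 * address matching; ath9k_hw_keyreset() clears both MAC words again.
 * The fold of macHi bit 0 into macLo bit 31 above carries the low bit of
 * the upper half across the 32-bit boundary, consistent with the key
 * cache storing the address shifted right by one bit.
 */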
2653 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2654 const struct ath9k_keyval *k,
2657 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2658 u32 key0, key1, key2, key3, key4;
2661 if (entry >= pCap->keycache_size) {
2662 DPRINTF(ah, ATH_DBG_FATAL,
2663 "keycache entry %u out of range\n", entry);
2667 switch (k->kv_type) {
2668 case ATH9K_CIPHER_AES_OCB:
2669 keyType = AR_KEYTABLE_TYPE_AES;
2671 case ATH9K_CIPHER_AES_CCM:
2672 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2673 DPRINTF(ah, ATH_DBG_ANY,
2674 "AES-CCM not supported by mac rev 0x%x\n",
2675 ah->hw_version.macRev);
2678 keyType = AR_KEYTABLE_TYPE_CCM;
2680 case ATH9K_CIPHER_TKIP:
2681 keyType = AR_KEYTABLE_TYPE_TKIP;
2682 if (ATH9K_IS_MIC_ENABLED(ah)
2683 && entry + 64 >= pCap->keycache_size) {
2684 DPRINTF(ah, ATH_DBG_ANY,
2685 "entry %u inappropriate for TKIP\n", entry);
2689 case ATH9K_CIPHER_WEP:
2690 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2691 DPRINTF(ah, ATH_DBG_ANY,
2692 "WEP key length %u too small\n", k->kv_len);
2695 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2696 keyType = AR_KEYTABLE_TYPE_40;
2697 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2698 keyType = AR_KEYTABLE_TYPE_104;
2700 keyType = AR_KEYTABLE_TYPE_128;
2702 case ATH9K_CIPHER_CLR:
2703 keyType = AR_KEYTABLE_TYPE_CLR;
2706 DPRINTF(ah, ATH_DBG_FATAL,
2707 "cipher %u not supported\n", k->kv_type);
2711 key0 = get_unaligned_le32(k->kv_val + 0);
2712 key1 = get_unaligned_le16(k->kv_val + 4);
2713 key2 = get_unaligned_le32(k->kv_val + 6);
2714 key3 = get_unaligned_le16(k->kv_val + 10);
2715 key4 = get_unaligned_le32(k->kv_val + 12);
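/*
 * Layout of the key words read above: key0 = kv_val[0..3], key1 =
 * kv_val[4..5] (16 bits), key2 = kv_val[6..9], key3 = kv_val[10..11]
 * (16 bits), key4 = kv_val[12..15].  A 128-bit key is thus split into
 * the 32/16/32/16/32-bit pattern expected by the KEY0..KEY4 registers.
 */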
2716 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2720 * Note: The key cache registers access a special memory area that requires
2721 * two 32-bit writes to actually update the values in the internal
2722 * memory. Consequently, the exact order and pairs used here must be
2726 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2727 u16 micentry = entry + 64;
2730 * Write inverted key[47:0] first to avoid Michael MIC errors
2731 * on frames that could be sent or received at the same time.
2732 * The correct key will be written in the end once everything
2735 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2736 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2738 /* Write key[95:48] */
2739 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2740 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2742 /* Write key[127:96] and key type */
2743 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2744 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2746 /* Write MAC address for the entry */
2747 (void) ath9k_hw_keysetmac(ah, entry, mac);
2749 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2751 * TKIP uses two key cache entries:
2752 * Michael MIC TX/RX keys in the same key cache entry
2753 * (idx = main index + 64):
2754 * key0 [31:0] = RX key [31:0]
2755 * key1 [15:0] = TX key [31:16]
2756 * key1 [31:16] = reserved
2757 * key2 [31:0] = RX key [63:32]
2758 * key3 [15:0] = TX key [15:0]
2759 * key3 [31:16] = reserved
2760 * key4 [31:0] = TX key [63:32]
2762 u32 mic0, mic1, mic2, mic3, mic4;
2764 mic0 = get_unaligned_le32(k->kv_mic + 0);
2765 mic2 = get_unaligned_le32(k->kv_mic + 4);
2766 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2767 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2768 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2770 /* Write RX[31:0] and TX[31:16] */
2771 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2772 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2774 /* Write RX[63:32] and TX[15:0] */
2775 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2776 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2778 /* Write TX[63:32] and keyType(reserved) */
2779 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2780 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2781 AR_KEYTABLE_TYPE_CLR);
2785 * TKIP uses four key cache entries (two for group
2787 * Michael MIC TX/RX keys are in different key cache
2788 * entries (idx = main index + 64 for TX and
2789 * main index + 32 + 96 for RX):
2790 * key0 [31:0] = TX/RX MIC key [31:0]
2791 * key1 [31:0] = reserved
2792 * key2 [31:0] = TX/RX MIC key [63:32]
2793 * key3 [31:0] = reserved
2794 * key4 [31:0] = reserved
2796 * Upper layer code will call this function separately
2797 * for TX and RX keys when these register offsets are
2802 mic0 = get_unaligned_le32(k->kv_mic + 0);
2803 mic2 = get_unaligned_le32(k->kv_mic + 4);
2805 /* Write MIC key[31:0] */
2806 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2807 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2809 /* Write MIC key[63:32] */
2810 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2811 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2813 /* key4 is reserved in this layout; write zero and set keyType to CLR */
2814 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2815 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2816 AR_KEYTABLE_TYPE_CLR);
2819 /* MAC address registers are reserved for the MIC entry */
2820 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2821 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2824 * Write the correct (un-inverted) key[47:0] last to enable
2825 * TKIP now that all other registers are set with correct
2828 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2829 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2831 /* Write key[47:0] */
2832 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2833 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2835 /* Write key[95:48] */
2836 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2837 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2839 /* Write key[127:96] and key type */
2840 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2841 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2843 /* Write MAC address for the entry */
2844 (void) ath9k_hw_keysetmac(ah, entry, mac);
2850 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2852 if (entry < ah->caps.keycache_size) {
2853 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2854 if (val & AR_KEYTABLE_VALID)
2860 /******************************/
2861 /* Power Management (Chipset) */
2862 /******************************/
2864 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2866 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2868 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2869 AR_RTC_FORCE_WAKE_EN);
2870 if (!AR_SREV_9100(ah))
2871 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2873 REG_CLR_BIT(ah, (AR_RTC_RESET),
2878 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2880 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2882 struct ath9k_hw_capabilities *pCap = &ah->caps;
2884 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2885 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2886 AR_RTC_FORCE_WAKE_ON_INT);
2888 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2889 AR_RTC_FORCE_WAKE_EN);
2894 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2900 if ((REG_READ(ah, AR_RTC_STATUS) &
2901 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2902 if (!ath9k_hw_set_reset_reg(ah,
2903 ATH9K_RESET_POWER_ON)) {
2907 if (AR_SREV_9100(ah))
2908 REG_SET_BIT(ah, AR_RTC_RESET,
2911 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2912 AR_RTC_FORCE_WAKE_EN);
2915 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2916 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2917 if (val == AR_RTC_STATUS_ON)
2920 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2921 AR_RTC_FORCE_WAKE_EN);
2924 DPRINTF(ah, ATH_DBG_FATAL,
2925 "Failed to wakeup in %uus\n", POWER_UP_TIME / 20);
2930 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2935 static bool ath9k_hw_setpower_nolock(struct ath_hw *ah,
2936 enum ath9k_power_mode mode)
2938 int status = true, setChip = true;
2939 static const char *modes[] = {
2946 if (ah->power_mode == mode)
2949 DPRINTF(ah, ATH_DBG_RESET, "%s -> %s\n",
2950 modes[ah->power_mode], modes[mode]);
2953 case ATH9K_PM_AWAKE:
2954 status = ath9k_hw_set_power_awake(ah, setChip);
2956 case ATH9K_PM_FULL_SLEEP:
2957 ath9k_set_power_sleep(ah, setChip);
2958 ah->chip_fullsleep = true;
2960 case ATH9K_PM_NETWORK_SLEEP:
2961 ath9k_set_power_network_sleep(ah, setChip);
2964 DPRINTF(ah, ATH_DBG_FATAL,
2965 "Unknown power mode %u\n", mode);
2968 ah->power_mode = mode;
2973 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2975 unsigned long flags;
2978 spin_lock_irqsave(&ah->ah_sc->sc_pm_lock, flags);
2979 ret = ath9k_hw_setpower_nolock(ah, mode);
2980 spin_unlock_irqrestore(&ah->ah_sc->sc_pm_lock, flags);
2985 void ath9k_ps_wakeup(struct ath_softc *sc)
2987 unsigned long flags;
2989 spin_lock_irqsave(&sc->sc_pm_lock, flags);
2990 if (++sc->ps_usecount != 1)
2993 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_AWAKE);
2996 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
2999 void ath9k_ps_restore(struct ath_softc *sc)
3001 unsigned long flags;
3003 spin_lock_irqsave(&sc->sc_pm_lock, flags);
3004 if (--sc->ps_usecount != 0)
3007 if (sc->ps_enabled &&
3008 !(sc->sc_flags & (SC_OP_WAIT_FOR_BEACON |
3009 SC_OP_WAIT_FOR_CAB |
3010 SC_OP_WAIT_FOR_PSPOLL_DATA |
3011 SC_OP_WAIT_FOR_TX_ACK)))
3012 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_NETWORK_SLEEP);
3015 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
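/*
 * Usage sketch (illustrative, not from the original file): code that
 * touches registers while the chip may be in network sleep brackets the
 * access with the refcounted helpers above, e.g.
 *
 *	ath9k_ps_wakeup(sc);
 *	ath9k_hw_gettsf64(sc->sc_ah);
 *	ath9k_ps_restore(sc);
 *
 * Only the outermost wakeup/restore pair changes the power state; nested
 * calls merely adjust ps_usecount.
 */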
3019 * Helper for ASPM support.
3021 * Disable the PLL when in L0s, as well as the receiver clock when in L1.
3022 * This power-saving option must be enabled through the SerDes.
3024 * Programming the SerDes must go through the same 288-bit serial shift
3025 * register as the other analog registers. Hence the 9 writes.
3027 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
3032 if (!ah->is_pciexpress)
3035 /* Do not touch SerDes registers */
3036 if (ah->config.pcie_powersave_enable == 2)
3039 /* Nothing to do on restore for 11N */
3041 if (AR_SREV_9280_20_OR_LATER(ah)) {
3043 * AR9280 2.0 or later chips use SerDes values from the
3044 * initvals.h initialized depending on chipset during
3047 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3048 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3049 INI_RA(&ah->iniPcieSerdes, i, 1));
3051 } else if (AR_SREV_9280(ah) &&
3052 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3053 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3054 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3056 /* RX shut off when elecidle is asserted */
3057 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3058 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3059 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3061 /* Shut off CLKREQ active in L1 */
3062 if (ah->config.pcie_clock_req)
3063 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3065 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3067 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3068 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3069 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3071 /* Load the new settings */
3072 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
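/*
 * The nine AR_PCIE_SERDES writes above (counting the pcie_clock_req
 * alternative as one) supply the 288 bits referred to in the comment
 * before this function; the trailing AR_PCIE_SERDES2 write of zero is
 * what loads the shifted-in settings.
 */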
3075 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3076 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3078 /* RX shut off when elecidle is asserted */
3079 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3080 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3081 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3084 * Ignore the ah->config.pcie_clock_req setting for
3087 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3089 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3090 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3091 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3093 /* Load the new settings */
3094 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3099 /* Set bit 19 to allow forcing the PCIe core into the L1 state */
3100 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3102 /* Several PCIe workarounds to ensure proper behaviour */
3103 if (ah->config.pcie_waen) {
3104 val = ah->config.pcie_waen;
3106 val &= (~AR_WA_D3_L1_DISABLE);
3108 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3110 val = AR9285_WA_DEFAULT;
3112 val &= (~AR_WA_D3_L1_DISABLE);
3113 } else if (AR_SREV_9280(ah)) {
3115 * On AR9280 chips bit 22 of 0x4004 needs to be
3116 * set otherwise card may disappear.
3118 val = AR9280_WA_DEFAULT;
3120 val &= (~AR_WA_D3_L1_DISABLE);
3122 val = AR_WA_DEFAULT;
3125 REG_WRITE(ah, AR_WA, val);
3130 * Set PCIe workaround bits:
3131 * bit 14 in the WA register (disable L1) should only
3132 * be set when the device enters D3 and cleared again
3133 * when the device comes back to D0.
3135 if (ah->config.pcie_waen) {
3136 if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE)
3137 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3139 if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3140 AR_SREV_9287(ah)) &&
3141 (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) ||
3142 (AR_SREV_9280(ah) &&
3143 (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) {
3144 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
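/*
 * In other words, AR_WA_D3_L1_DISABLE (bit 14) is only OR'ed in on the
 * power-off path, either because the platform asked for it through
 * ah->config.pcie_waen or because the chip's default WA value contains
 * it; the earlier path masks the bit out so that it stays clear in D0.
 */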
3150 /**********************/
3151 /* Interrupt Handling */
3152 /**********************/
3154 bool ath9k_hw_intrpend(struct ath_hw *ah)
3158 if (AR_SREV_9100(ah))
3161 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3162 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3165 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3166 if ((host_isr & AR_INTR_SYNC_DEFAULT)
3167 && (host_isr != AR_INTR_SPURIOUS))
3173 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3177 struct ath9k_hw_capabilities *pCap = &ah->caps;
3179 bool fatal_int = false;
3181 if (!AR_SREV_9100(ah)) {
3182 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3183 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3184 == AR_RTC_STATUS_ON) {
3185 isr = REG_READ(ah, AR_ISR);
3189 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3190 AR_INTR_SYNC_DEFAULT;
3194 if (!isr && !sync_cause)
3198 isr = REG_READ(ah, AR_ISR);
3202 if (isr & AR_ISR_BCNMISC) {
3204 isr2 = REG_READ(ah, AR_ISR_S2);
3205 if (isr2 & AR_ISR_S2_TIM)
3206 mask2 |= ATH9K_INT_TIM;
3207 if (isr2 & AR_ISR_S2_DTIM)
3208 mask2 |= ATH9K_INT_DTIM;
3209 if (isr2 & AR_ISR_S2_DTIMSYNC)
3210 mask2 |= ATH9K_INT_DTIMSYNC;
3211 if (isr2 & (AR_ISR_S2_CABEND))
3212 mask2 |= ATH9K_INT_CABEND;
3213 if (isr2 & AR_ISR_S2_GTT)
3214 mask2 |= ATH9K_INT_GTT;
3215 if (isr2 & AR_ISR_S2_CST)
3216 mask2 |= ATH9K_INT_CST;
3217 if (isr2 & AR_ISR_S2_TSFOOR)
3218 mask2 |= ATH9K_INT_TSFOOR;
3221 isr = REG_READ(ah, AR_ISR_RAC);
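/*
 * AR_ISR_RAC is taken to be the read-and-clear shadow of AR_ISR, so this
 * read also acknowledges the bits it returns; the AR_ISR_Sx_S shadow
 * registers read below behave the same way (assumption based on the
 * register naming).
 */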
3222 if (isr == 0xffffffff) {
3227 *masked = isr & ATH9K_INT_COMMON;
3229 if (ah->config.intr_mitigation) {
3230 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3231 *masked |= ATH9K_INT_RX;
3234 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3235 *masked |= ATH9K_INT_RX;
3237 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3241 *masked |= ATH9K_INT_TX;
3243 s0_s = REG_READ(ah, AR_ISR_S0_S);
3244 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3245 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3247 s1_s = REG_READ(ah, AR_ISR_S1_S);
3248 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3249 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3252 if (isr & AR_ISR_RXORN) {
3253 DPRINTF(ah, ATH_DBG_INTERRUPT,
3254 "receive FIFO overrun interrupt\n");
3257 if (!AR_SREV_9100(ah)) {
3258 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3259 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3260 if (isr5 & AR_ISR_S5_TIM_TIMER)
3261 *masked |= ATH9K_INT_TIM_TIMER;
3268 if (AR_SREV_9100(ah))
3271 if (isr & AR_ISR_GENTMR) {
3274 s5_s = REG_READ(ah, AR_ISR_S5_S);
3275 if (isr & AR_ISR_GENTMR) {
3276 ah->intr_gen_timer_trigger =
3277 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3279 ah->intr_gen_timer_thresh =
3280 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3282 if (ah->intr_gen_timer_trigger)
3283 *masked |= ATH9K_INT_GENTIMER;
3291 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3295 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3296 DPRINTF(ah, ATH_DBG_ANY,
3297 "received PCI FATAL interrupt\n");
3299 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3300 DPRINTF(ah, ATH_DBG_ANY,
3301 "received PCI PERR interrupt\n");
3303 *masked |= ATH9K_INT_FATAL;
3305 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3306 DPRINTF(ah, ATH_DBG_INTERRUPT,
3307 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3308 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3309 REG_WRITE(ah, AR_RC, 0);
3310 *masked |= ATH9K_INT_FATAL;
3312 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3313 DPRINTF(ah, ATH_DBG_INTERRUPT,
3314 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3317 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3318 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3324 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3326 u32 omask = ah->mask_reg;
3328 struct ath9k_hw_capabilities *pCap = &ah->caps;
3330 DPRINTF(ah, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3332 if (omask & ATH9K_INT_GLOBAL) {
3333 DPRINTF(ah, ATH_DBG_INTERRUPT, "disable IER\n");
3334 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3335 (void) REG_READ(ah, AR_IER);
3336 if (!AR_SREV_9100(ah)) {
3337 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3338 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3340 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3341 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3345 mask = ints & ATH9K_INT_COMMON;
3348 if (ints & ATH9K_INT_TX) {
3349 if (ah->txok_interrupt_mask)
3350 mask |= AR_IMR_TXOK;
3351 if (ah->txdesc_interrupt_mask)
3352 mask |= AR_IMR_TXDESC;
3353 if (ah->txerr_interrupt_mask)
3354 mask |= AR_IMR_TXERR;
3355 if (ah->txeol_interrupt_mask)
3356 mask |= AR_IMR_TXEOL;
3358 if (ints & ATH9K_INT_RX) {
3359 mask |= AR_IMR_RXERR;
3360 if (ah->config.intr_mitigation)
3361 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3363 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3364 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3365 mask |= AR_IMR_GENTMR;
3368 if (ints & (ATH9K_INT_BMISC)) {
3369 mask |= AR_IMR_BCNMISC;
3370 if (ints & ATH9K_INT_TIM)
3371 mask2 |= AR_IMR_S2_TIM;
3372 if (ints & ATH9K_INT_DTIM)
3373 mask2 |= AR_IMR_S2_DTIM;
3374 if (ints & ATH9K_INT_DTIMSYNC)
3375 mask2 |= AR_IMR_S2_DTIMSYNC;
3376 if (ints & ATH9K_INT_CABEND)
3377 mask2 |= AR_IMR_S2_CABEND;
3378 if (ints & ATH9K_INT_TSFOOR)
3379 mask2 |= AR_IMR_S2_TSFOOR;
3382 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3383 mask |= AR_IMR_BCNMISC;
3384 if (ints & ATH9K_INT_GTT)
3385 mask2 |= AR_IMR_S2_GTT;
3386 if (ints & ATH9K_INT_CST)
3387 mask2 |= AR_IMR_S2_CST;
3390 DPRINTF(ah, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3391 REG_WRITE(ah, AR_IMR, mask);
3392 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3394 AR_IMR_S2_DTIMSYNC |
3398 AR_IMR_S2_GTT | AR_IMR_S2_CST);
3399 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3400 ah->mask_reg = ints;
3402 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3403 if (ints & ATH9K_INT_TIM_TIMER)
3404 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3406 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3409 if (ints & ATH9K_INT_GLOBAL) {
3410 DPRINTF(ah, ATH_DBG_INTERRUPT, "enable IER\n");
3411 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3412 if (!AR_SREV_9100(ah)) {
3413 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3415 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3418 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3419 AR_INTR_SYNC_DEFAULT);
3420 REG_WRITE(ah, AR_INTR_SYNC_MASK,
3421 AR_INTR_SYNC_DEFAULT);
3423 DPRINTF(ah, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3424 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
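/*
 * Usage sketch (illustrative): callers keep the desired ATH9K_INT_* mask
 * in software and hand it to this function whenever it changes, e.g.
 *
 *	ath9k_hw_set_interrupts(ah, 0);		// mask everything in the ISR
 *	...
 *	ath9k_hw_set_interrupts(ah, sc->imask);	// restore afterwards
 *
 * sc->imask is assumed to be the driver's cached copy of the mask.
 */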
3430 /*******************/
3431 /* Beacon Handling */
3432 /*******************/
3434 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3438 ah->beacon_interval = beacon_period;
3440 switch (ah->opmode) {
3441 case NL80211_IFTYPE_STATION:
3442 case NL80211_IFTYPE_MONITOR:
3443 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3444 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3445 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3446 flags |= AR_TBTT_TIMER_EN;
3448 case NL80211_IFTYPE_ADHOC:
3449 case NL80211_IFTYPE_MESH_POINT:
3450 REG_SET_BIT(ah, AR_TXCFG,
3451 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3452 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3453 TU_TO_USEC(next_beacon +
3454 (ah->atim_window ? ah->
3456 flags |= AR_NDP_TIMER_EN;
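/* no break: fall through so the TBTT/DMA/SWBA timers below are programmed too */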
3457 case NL80211_IFTYPE_AP:
3458 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3459 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3460 TU_TO_USEC(next_beacon -
3462 dma_beacon_response_time));
3463 REG_WRITE(ah, AR_NEXT_SWBA,
3464 TU_TO_USEC(next_beacon -
3466 sw_beacon_response_time));
3468 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3471 DPRINTF(ah, ATH_DBG_BEACON,
3472 "%s: unsupported opmode: %d\n",
3473 __func__, ah->opmode);
3478 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3479 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3480 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3481 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
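/*
 * TU_TO_USEC() converts beacon time units to microseconds (1 TU = 1024 us),
 * so a typical beacon_period of 100 TU programs 102400 us into the four
 * period registers above.
 */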
3483 beacon_period &= ~ATH9K_BEACON_ENA;
3484 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3485 beacon_period &= ~ATH9K_BEACON_RESET_TSF;
3486 ath9k_hw_reset_tsf(ah);
3489 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3492 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3493 const struct ath9k_beacon_state *bs)
3495 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3496 struct ath9k_hw_capabilities *pCap = &ah->caps;
3498 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3500 REG_WRITE(ah, AR_BEACON_PERIOD,
3501 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3502 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3503 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3505 REG_RMW_FIELD(ah, AR_RSSI_THR,