2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 #include <asm/unaligned.h>
24 #define ATH9K_CLOCK_RATE_CCK 22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
30 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
31 struct ar5416_eeprom_def *pEepData,
33 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
34 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
36 /********************/
37 /* Helper Functions */
38 /********************/
40 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
42 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
44 if (!ah->curchan) /* should really check for CCK instead */
45 return clks / ATH9K_CLOCK_RATE_CCK;
46 if (conf->channel->band == IEEE80211_BAND_2GHZ)
47 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
49 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
52 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
54 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
56 if (conf_is_ht40(conf))
57 return ath9k_hw_mac_usec(ah, clks) / 2;
59 return ath9k_hw_mac_usec(ah, clks);
62 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
64 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
66 if (!ah->curchan) /* should really check for CCK instead */
67 return usecs * ATH9K_CLOCK_RATE_CCK;
68 if (conf->channel->band == IEEE80211_BAND_2GHZ)
69 return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
70 return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
73 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
75 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
77 if (conf_is_ht40(conf))
78 return ath9k_hw_mac_clks(ah, usecs) * 2;
80 return ath9k_hw_mac_clks(ah, usecs);
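/*
 * Worked example (illustrative only): on a 2 GHz OFDM channel the MAC
 * clock runs at 44 MHz, so 880 clocks correspond to 880 / 44 = 20 usec.
 * On an HT40 channel the clock rate doubles, so ath9k_hw_mac_to_usec()
 * halves the result to 10 usec, and ath9k_hw_mac_to_clks() doubles the
 * clock count in the other direction.
 */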
83 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
87 BUG_ON(timeout < AH_TIME_QUANTUM);
89 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
90 if ((REG_READ(ah, reg) & mask) == val)
93 udelay(AH_TIME_QUANTUM);
96 ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
97 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
98 timeout, reg, REG_READ(ah, reg), mask, val);
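/*
 * Usage sketch (illustrative only, not part of the driver): a caller
 * polling via ath9k_hw_wait() for the RTC to report the ON power state.
 * The timeout is given in microseconds and is consumed in
 * AH_TIME_QUANTUM sized steps.
 */
#if 0
static bool example_wait_rtc_on(struct ath_hw *ah)
{
	return ath9k_hw_wait(ah, AR_RTC_STATUS, AR_RTC_STATUS_M,
			     AR_RTC_STATUS_ON, AH_WAIT_TIMEOUT);
}
#endif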
103 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
108 for (i = 0, retval = 0; i < n; i++) {
109 retval = (retval << 1) | (val & 1);
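/*
 * Worked example (illustrative only): ath9k_hw_reverse_bits(0xB, 4)
 * consumes the bits of 1011b LSB first and returns 1101b, i.e. 0xD.
 */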
115 bool ath9k_get_channel_edges(struct ath_hw *ah,
119 struct ath9k_hw_capabilities *pCap = &ah->caps;
121 if (flags & CHANNEL_5GHZ) {
122 *low = pCap->low_5ghz_chan;
123 *high = pCap->high_5ghz_chan;
126 if ((flags & CHANNEL_2GHZ)) {
127 *low = pCap->low_2ghz_chan;
128 *high = pCap->high_2ghz_chan;
134 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
135 const struct ath_rate_table *rates,
136 u32 frameLen, u16 rateix,
139 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
142 kbps = rates->info[rateix].ratekbps;
147 switch (rates->info[rateix].phy) {
148 case WLAN_RC_PHY_CCK:
149 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
150 if (shortPreamble && rates->info[rateix].short_preamble)
152 numBits = frameLen << 3;
153 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
155 case WLAN_RC_PHY_OFDM:
156 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
157 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
158 numBits = OFDM_PLCP_BITS + (frameLen << 3);
159 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
160 txTime = OFDM_SIFS_TIME_QUARTER
161 + OFDM_PREAMBLE_TIME_QUARTER
162 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
163 } else if (ah->curchan &&
164 IS_CHAN_HALF_RATE(ah->curchan)) {
165 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
166 numBits = OFDM_PLCP_BITS + (frameLen << 3);
167 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
168 txTime = OFDM_SIFS_TIME_HALF +
169 OFDM_PREAMBLE_TIME_HALF
170 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
172 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
173 numBits = OFDM_PLCP_BITS + (frameLen << 3);
174 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
175 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
176 + (numSymbols * OFDM_SYMBOL_TIME);
180 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
181 "Unknown phy %u (rate ix %u)\n",
182 rates->info[rateix].phy, rateix);
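/*
 * Worked example (illustrative only, assuming the usual OFDM constants:
 * 22 PLCP bits, 4 usec symbols, 20 usec preamble, 16 usec SIFS): a
 * 1500-byte frame at the 6 Mbps OFDM rate on a full-rate channel gives
 * bitsPerSymbol = (6000 * 4) / 1000 = 24, numBits = 22 + 12000 = 12022,
 * numSymbols = DIV_ROUND_UP(12022, 24) = 501, and
 * txTime = 16 + 20 + 501 * 4 = 2040 usec.
 */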
190 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
191 struct ath9k_channel *chan,
192 struct chan_centers *centers)
196 if (!IS_CHAN_HT40(chan)) {
197 centers->ctl_center = centers->ext_center =
198 centers->synth_center = chan->channel;
202 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
203 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
204 centers->synth_center =
205 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
208 centers->synth_center =
209 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
213 centers->ctl_center =
214 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
215 /* 25 MHz spacing is supported by hw but not on upper layers */
216 centers->ext_center =
217 centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
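/*
 * Worked example (illustrative only): for an HT40+ channel with
 * chan->channel = 2437, the synthesizer sits 10 MHz above the control
 * channel, so synth_center = 2447, ctl_center = 2437 and
 * ext_center = 2457 (HT40_CHANNEL_CENTER_SHIFT is 10 MHz).
 */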
224 static void ath9k_hw_read_revisions(struct ath_hw *ah)
228 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
231 val = REG_READ(ah, AR_SREV);
232 ah->hw_version.macVersion =
233 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
234 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
235 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
237 if (!AR_SREV_9100(ah))
238 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
240 ah->hw_version.macRev = val & AR_SREV_REVISION;
242 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
243 ah->is_pciexpress = true;
247 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
252 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
254 for (i = 0; i < 8; i++)
255 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
256 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
257 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
259 return ath9k_hw_reverse_bits(val, 8);
262 /************************************/
263 /* HW Attach, Detach, Init Routines */
264 /************************************/
266 static void ath9k_hw_disablepcie(struct ath_hw *ah)
268 if (AR_SREV_9100(ah))
271 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
272 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
273 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
274 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
275 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
276 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
277 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
278 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
279 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
281 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
284 static bool ath9k_hw_chip_test(struct ath_hw *ah)
286 struct ath_common *common = ath9k_hw_common(ah);
287 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
289 u32 patternData[4] = { 0x55555555,
295 for (i = 0; i < 2; i++) {
296 u32 addr = regAddr[i];
299 regHold[i] = REG_READ(ah, addr);
300 for (j = 0; j < 0x100; j++) {
301 wrData = (j << 16) | j;
302 REG_WRITE(ah, addr, wrData);
303 rdData = REG_READ(ah, addr);
304 if (rdData != wrData) {
305 ath_print(common, ATH_DBG_FATAL,
306 "address test failed "
307 "addr: 0x%08x - wr:0x%08x != "
309 addr, wrData, rdData);
313 for (j = 0; j < 4; j++) {
314 wrData = patternData[j];
315 REG_WRITE(ah, addr, wrData);
316 rdData = REG_READ(ah, addr);
317 if (wrData != rdData) {
318 ath_print(common, ATH_DBG_FATAL,
319 "address test failed "
320 "addr: 0x%08x - wr:0x%08x != "
322 addr, wrData, rdData);
326 REG_WRITE(ah, regAddr[i], regHold[i]);
333 static const char *ath9k_hw_devname(u16 devid)
336 case AR5416_DEVID_PCI:
337 return "Atheros 5416";
338 case AR5416_DEVID_PCIE:
339 return "Atheros 5418";
340 case AR9160_DEVID_PCI:
341 return "Atheros 9160";
342 case AR5416_AR9100_DEVID:
343 return "Atheros 9100";
344 case AR9280_DEVID_PCI:
345 case AR9280_DEVID_PCIE:
346 return "Atheros 9280";
347 case AR9285_DEVID_PCIE:
348 return "Atheros 9285";
349 case AR5416_DEVID_AR9287_PCI:
350 case AR5416_DEVID_AR9287_PCIE:
351 return "Atheros 9287";
357 static void ath9k_hw_init_config(struct ath_hw *ah)
361 ah->config.dma_beacon_response_time = 2;
362 ah->config.sw_beacon_response_time = 10;
363 ah->config.additional_swba_backoff = 0;
364 ah->config.ack_6mb = 0x0;
365 ah->config.cwm_ignore_extcca = 0;
366 ah->config.pcie_powersave_enable = 0;
367 ah->config.pcie_clock_req = 0;
368 ah->config.pcie_waen = 0;
369 ah->config.analog_shiftreg = 1;
370 ah->config.ht_enable = 1;
371 ah->config.ofdm_trig_low = 200;
372 ah->config.ofdm_trig_high = 500;
373 ah->config.cck_trig_high = 200;
374 ah->config.cck_trig_low = 100;
375 ah->config.enable_ani = 1;
376 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
377 ah->config.antenna_switch_swap = 0;
379 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
380 ah->config.spurchans[i][0] = AR_NO_SPUR;
381 ah->config.spurchans[i][1] = AR_NO_SPUR;
384 ah->config.intr_mitigation = true;
387 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
388 * _and_ only on non-uniprocessor systems (Multiprocessor/HT).
389 * This means we use it for all AR5416 devices, and the few
390 * minor PCI AR9280 devices out there.
392 * Serialization is required because these devices do not handle
393 * well the case of two concurrent reads/writes due to the latency
394 * involved. During one read/write, another read/write can be issued
395 * on another CPU while the previous read/write may still be working
396 * on our hardware; if we hit this case, the hardware can hang.
397 * We prevent this by serializing reads and writes.
399 * This issue is not present on PCI-Express devices or pre-AR5416
400 * devices (legacy, 802.11abg).
402 if (num_possible_cpus() > 1)
403 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
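/*
 * Minimal sketch of the serialization described above (illustrative
 * only, not the driver's actual register accessors; the lock and
 * function names are hypothetical): with SER_REG_MODE_ON every MMIO
 * access is taken under a spinlock so two CPUs never overlap.
 */
#if 0
static DEFINE_SPINLOCK(example_reg_lock);	/* hypothetical lock */

static u32 example_serialized_read32(void __iomem *mem, u32 reg)
{
	unsigned long flags;
	u32 val;

	/* Only one MMIO access in flight at a time. */
	spin_lock_irqsave(&example_reg_lock, flags);
	val = ioread32(mem + reg);
	spin_unlock_irqrestore(&example_reg_lock, flags);

	return val;
}
#endif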
406 static void ath9k_hw_init_defaults(struct ath_hw *ah)
408 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
410 regulatory->country_code = CTRY_DEFAULT;
411 regulatory->power_limit = MAX_RATE_POWER;
412 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
414 ah->hw_version.magic = AR5416_MAGIC;
415 ah->hw_version.subvendorid = 0;
418 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
419 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
420 if (!AR_SREV_9100(ah))
421 ah->ah_flags = AH_USE_EEPROM;
424 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
425 ah->beacon_interval = 100;
426 ah->enable_32kHz_clock = DONT_USE_32KHZ;
427 ah->slottime = (u32) -1;
428 ah->acktimeout = (u32) -1;
429 ah->ctstimeout = (u32) -1;
430 ah->globaltxtimeout = (u32) -1;
432 ah->gbeacon_rate = 0;
434 ah->power_mode = ATH9K_PM_UNDEFINED;
437 static int ath9k_hw_rfattach(struct ath_hw *ah)
439 bool rfStatus = false;
442 rfStatus = ath9k_hw_init_rf(ah, &ecode);
444 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
445 "RF setup failed, status: %u\n", ecode);
452 static int ath9k_hw_rf_claim(struct ath_hw *ah)
456 REG_WRITE(ah, AR_PHY(0), 0x00000007);
458 val = ath9k_hw_get_radiorev(ah);
459 switch (val & AR_RADIO_SREV_MAJOR) {
461 val = AR_RAD5133_SREV_MAJOR;
463 case AR_RAD5133_SREV_MAJOR:
464 case AR_RAD5122_SREV_MAJOR:
465 case AR_RAD2133_SREV_MAJOR:
466 case AR_RAD2122_SREV_MAJOR:
469 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
470 "Radio Chip Rev 0x%02X not supported\n",
471 val & AR_RADIO_SREV_MAJOR);
475 ah->hw_version.analog5GhzRev = val;
480 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
482 struct ath_common *common = ath9k_hw_common(ah);
488 for (i = 0; i < 3; i++) {
489 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
491 common->macaddr[2 * i] = eeval >> 8;
492 common->macaddr[2 * i + 1] = eeval & 0xff;
494 if (sum == 0 || sum == 0xffff * 3)
495 return -EADDRNOTAVAIL;
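/*
 * Worked example (illustrative only): an EEPROM word of 0x0013 read for
 * AR_EEPROM_MAC(0) yields macaddr[0] = 0x00 and macaddr[1] = 0x13; the
 * sum check above rejects an all-zero or all-ones (erased) EEPROM.
 */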
500 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
504 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
505 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
507 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
508 INIT_INI_ARRAY(&ah->iniModesRxGain,
509 ar9280Modes_backoff_13db_rxgain_9280_2,
510 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
511 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
512 INIT_INI_ARRAY(&ah->iniModesRxGain,
513 ar9280Modes_backoff_23db_rxgain_9280_2,
514 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
516 INIT_INI_ARRAY(&ah->iniModesRxGain,
517 ar9280Modes_original_rxgain_9280_2,
518 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
520 INIT_INI_ARRAY(&ah->iniModesRxGain,
521 ar9280Modes_original_rxgain_9280_2,
522 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
526 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
530 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
531 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
533 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
534 INIT_INI_ARRAY(&ah->iniModesTxGain,
535 ar9280Modes_high_power_tx_gain_9280_2,
536 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
538 INIT_INI_ARRAY(&ah->iniModesTxGain,
539 ar9280Modes_original_tx_gain_9280_2,
540 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
542 INIT_INI_ARRAY(&ah->iniModesTxGain,
543 ar9280Modes_original_tx_gain_9280_2,
544 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
548 static int ath9k_hw_post_init(struct ath_hw *ah)
552 if (!ath9k_hw_chip_test(ah))
555 ecode = ath9k_hw_rf_claim(ah);
559 ecode = ath9k_hw_eeprom_init(ah);
563 ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG,
564 "Eeprom VER: %d, REV: %d\n",
565 ah->eep_ops->get_eeprom_ver(ah),
566 ah->eep_ops->get_eeprom_rev(ah));
568 ecode = ath9k_hw_rfattach(ah);
572 if (!AR_SREV_9100(ah)) {
573 ath9k_hw_ani_setup(ah);
574 ath9k_hw_ani_init(ah);
580 static bool ath9k_hw_devid_supported(u16 devid)
583 case AR5416_DEVID_PCI:
584 case AR5416_DEVID_PCIE:
585 case AR5416_AR9100_DEVID:
586 case AR9160_DEVID_PCI:
587 case AR9280_DEVID_PCI:
588 case AR9280_DEVID_PCIE:
589 case AR9285_DEVID_PCIE:
590 case AR5416_DEVID_AR9287_PCI:
591 case AR5416_DEVID_AR9287_PCIE:
599 static bool ath9k_hw_macversion_supported(u32 macversion)
601 switch (macversion) {
602 case AR_SREV_VERSION_5416_PCI:
603 case AR_SREV_VERSION_5416_PCIE:
604 case AR_SREV_VERSION_9160:
605 case AR_SREV_VERSION_9100:
606 case AR_SREV_VERSION_9280:
607 case AR_SREV_VERSION_9285:
608 case AR_SREV_VERSION_9287:
611 case AR_SREV_VERSION_9271:
618 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
620 if (AR_SREV_9160_10_OR_LATER(ah)) {
621 if (AR_SREV_9280_10_OR_LATER(ah)) {
622 ah->iq_caldata.calData = &iq_cal_single_sample;
623 ah->adcgain_caldata.calData =
624 &adc_gain_cal_single_sample;
625 ah->adcdc_caldata.calData =
626 &adc_dc_cal_single_sample;
627 ah->adcdc_calinitdata.calData =
630 ah->iq_caldata.calData = &iq_cal_multi_sample;
631 ah->adcgain_caldata.calData =
632 &adc_gain_cal_multi_sample;
633 ah->adcdc_caldata.calData =
634 &adc_dc_cal_multi_sample;
635 ah->adcdc_calinitdata.calData =
638 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
642 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
644 if (AR_SREV_9271(ah)) {
645 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
646 ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
647 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
648 ARRAY_SIZE(ar9271Common_9271_1_0), 2);
652 if (AR_SREV_9287_11_OR_LATER(ah)) {
653 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
654 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
655 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
656 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
657 if (ah->config.pcie_clock_req)
658 INIT_INI_ARRAY(&ah->iniPcieSerdes,
659 ar9287PciePhy_clkreq_off_L1_9287_1_1,
660 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
662 INIT_INI_ARRAY(&ah->iniPcieSerdes,
663 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
664 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
666 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
667 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
668 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
669 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
670 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
672 if (ah->config.pcie_clock_req)
673 INIT_INI_ARRAY(&ah->iniPcieSerdes,
674 ar9287PciePhy_clkreq_off_L1_9287_1_0,
675 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
677 INIT_INI_ARRAY(&ah->iniPcieSerdes,
678 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
679 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
681 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
684 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
685 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
686 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
687 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
689 if (ah->config.pcie_clock_req) {
690 INIT_INI_ARRAY(&ah->iniPcieSerdes,
691 ar9285PciePhy_clkreq_off_L1_9285_1_2,
692 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
694 INIT_INI_ARRAY(&ah->iniPcieSerdes,
695 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
696 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
699 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
700 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
701 ARRAY_SIZE(ar9285Modes_9285), 6);
702 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
703 ARRAY_SIZE(ar9285Common_9285), 2);
705 if (ah->config.pcie_clock_req) {
706 INIT_INI_ARRAY(&ah->iniPcieSerdes,
707 ar9285PciePhy_clkreq_off_L1_9285,
708 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
710 INIT_INI_ARRAY(&ah->iniPcieSerdes,
711 ar9285PciePhy_clkreq_always_on_L1_9285,
712 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
714 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
715 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
716 ARRAY_SIZE(ar9280Modes_9280_2), 6);
717 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
718 ARRAY_SIZE(ar9280Common_9280_2), 2);
720 if (ah->config.pcie_clock_req) {
721 INIT_INI_ARRAY(&ah->iniPcieSerdes,
722 ar9280PciePhy_clkreq_off_L1_9280,
723 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
725 INIT_INI_ARRAY(&ah->iniPcieSerdes,
726 ar9280PciePhy_clkreq_always_on_L1_9280,
727 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
729 INIT_INI_ARRAY(&ah->iniModesAdditional,
730 ar9280Modes_fast_clock_9280_2,
731 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
732 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
733 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
734 ARRAY_SIZE(ar9280Modes_9280), 6);
735 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
736 ARRAY_SIZE(ar9280Common_9280), 2);
737 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
738 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
739 ARRAY_SIZE(ar5416Modes_9160), 6);
740 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
741 ARRAY_SIZE(ar5416Common_9160), 2);
742 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
743 ARRAY_SIZE(ar5416Bank0_9160), 2);
744 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
745 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
746 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
747 ARRAY_SIZE(ar5416Bank1_9160), 2);
748 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
749 ARRAY_SIZE(ar5416Bank2_9160), 2);
750 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
751 ARRAY_SIZE(ar5416Bank3_9160), 3);
752 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
753 ARRAY_SIZE(ar5416Bank6_9160), 3);
754 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
755 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
756 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
757 ARRAY_SIZE(ar5416Bank7_9160), 2);
758 if (AR_SREV_9160_11(ah)) {
759 INIT_INI_ARRAY(&ah->iniAddac,
761 ARRAY_SIZE(ar5416Addac_91601_1), 2);
763 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
764 ARRAY_SIZE(ar5416Addac_9160), 2);
766 } else if (AR_SREV_9100_OR_LATER(ah)) {
767 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
768 ARRAY_SIZE(ar5416Modes_9100), 6);
769 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
770 ARRAY_SIZE(ar5416Common_9100), 2);
771 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
772 ARRAY_SIZE(ar5416Bank0_9100), 2);
773 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
774 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
775 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
776 ARRAY_SIZE(ar5416Bank1_9100), 2);
777 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
778 ARRAY_SIZE(ar5416Bank2_9100), 2);
779 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
780 ARRAY_SIZE(ar5416Bank3_9100), 3);
781 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
782 ARRAY_SIZE(ar5416Bank6_9100), 3);
783 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
784 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
785 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
786 ARRAY_SIZE(ar5416Bank7_9100), 2);
787 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
788 ARRAY_SIZE(ar5416Addac_9100), 2);
790 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
791 ARRAY_SIZE(ar5416Modes), 6);
792 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
793 ARRAY_SIZE(ar5416Common), 2);
794 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
795 ARRAY_SIZE(ar5416Bank0), 2);
796 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
797 ARRAY_SIZE(ar5416BB_RfGain), 3);
798 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
799 ARRAY_SIZE(ar5416Bank1), 2);
800 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
801 ARRAY_SIZE(ar5416Bank2), 2);
802 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
803 ARRAY_SIZE(ar5416Bank3), 3);
804 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
805 ARRAY_SIZE(ar5416Bank6), 3);
806 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
807 ARRAY_SIZE(ar5416Bank6TPC), 3);
808 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
809 ARRAY_SIZE(ar5416Bank7), 2);
810 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
811 ARRAY_SIZE(ar5416Addac), 2);
815 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
817 if (AR_SREV_9287_11_OR_LATER(ah))
818 INIT_INI_ARRAY(&ah->iniModesRxGain,
819 ar9287Modes_rx_gain_9287_1_1,
820 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
821 else if (AR_SREV_9287_10(ah))
822 INIT_INI_ARRAY(&ah->iniModesRxGain,
823 ar9287Modes_rx_gain_9287_1_0,
824 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
825 else if (AR_SREV_9280_20(ah))
826 ath9k_hw_init_rxgain_ini(ah);
828 if (AR_SREV_9287_11_OR_LATER(ah)) {
829 INIT_INI_ARRAY(&ah->iniModesTxGain,
830 ar9287Modes_tx_gain_9287_1_1,
831 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
832 } else if (AR_SREV_9287_10(ah)) {
833 INIT_INI_ARRAY(&ah->iniModesTxGain,
834 ar9287Modes_tx_gain_9287_1_0,
835 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
836 } else if (AR_SREV_9280_20(ah)) {
837 ath9k_hw_init_txgain_ini(ah);
838 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
839 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
842 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
843 INIT_INI_ARRAY(&ah->iniModesTxGain,
844 ar9285Modes_high_power_tx_gain_9285_1_2,
845 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
847 INIT_INI_ARRAY(&ah->iniModesTxGain,
848 ar9285Modes_original_tx_gain_9285_1_2,
849 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
855 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
859 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
860 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
863 for (i = 0; i < ah->iniModes.ia_rows; i++) {
864 u32 reg = INI_RA(&ah->iniModes, i, 0);
866 for (j = 1; j < ah->iniModes.ia_columns; j++) {
867 u32 val = INI_RA(&ah->iniModes, i, j);
869 INI_RA(&ah->iniModes, i, j) =
870 ath9k_hw_ini_fixup(ah,
878 int ath9k_hw_init(struct ath_hw *ah)
880 struct ath_common *common = ath9k_hw_common(ah);
883 if (!ath9k_hw_devid_supported(ah->hw_version.devid))
886 ath9k_hw_init_defaults(ah);
887 ath9k_hw_init_config(ah);
889 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
890 ath_print(common, ATH_DBG_FATAL,
891 "Couldn't reset chip\n");
895 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
896 ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
900 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
901 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
902 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
903 ah->config.serialize_regmode =
906 ah->config.serialize_regmode =
911 ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
912 ah->config.serialize_regmode);
914 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
915 ath_print(common, ATH_DBG_FATAL,
916 "Mac Chip Rev 0x%02x.%x is not supported by "
917 "this driver\n", ah->hw_version.macVersion,
918 ah->hw_version.macRev);
922 if (AR_SREV_9100(ah)) {
923 ah->iq_caldata.calData = &iq_cal_multi_sample;
924 ah->supp_cals = IQ_MISMATCH_CAL;
925 ah->is_pciexpress = false;
928 if (AR_SREV_9271(ah))
929 ah->is_pciexpress = false;
931 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
933 ath9k_hw_init_cal_settings(ah);
935 ah->ani_function = ATH9K_ANI_ALL;
936 if (AR_SREV_9280_10_OR_LATER(ah))
937 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
939 ath9k_hw_init_mode_regs(ah);
941 if (ah->is_pciexpress)
942 ath9k_hw_configpcipowersave(ah, 0, 0);
944 ath9k_hw_disablepcie(ah);
946 /* Support for Japan ch.14 (2484) spread */
947 if (AR_SREV_9287_11_OR_LATER(ah)) {
948 INIT_INI_ARRAY(&ah->iniCckfirNormal,
949 ar9287Common_normal_cck_fir_coeff_92871_1,
950 ARRAY_SIZE(ar9287Common_normal_cck_fir_coeff_92871_1), 2);
951 INIT_INI_ARRAY(&ah->iniCckfirJapan2484,
952 ar9287Common_japan_2484_cck_fir_coeff_92871_1,
953 ARRAY_SIZE(ar9287Common_japan_2484_cck_fir_coeff_92871_1), 2);
956 r = ath9k_hw_post_init(ah);
960 ath9k_hw_init_mode_gain_regs(ah);
961 ath9k_hw_fill_cap_info(ah);
962 ath9k_hw_init_11a_eeprom_fix(ah);
964 r = ath9k_hw_init_macaddr(ah);
966 ath_print(common, ATH_DBG_FATAL,
967 "Failed to initialize MAC address\n");
971 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
972 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
974 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
976 ath9k_init_nfcal_hist_buffer(ah);
981 static void ath9k_hw_init_bb(struct ath_hw *ah,
982 struct ath9k_channel *chan)
986 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
988 synthDelay = (4 * synthDelay) / 22;
992 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
994 udelay(synthDelay + BASE_ACTIVATE_DELAY);
997 static void ath9k_hw_init_qos(struct ath_hw *ah)
999 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1000 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1002 REG_WRITE(ah, AR_QOS_NO_ACK,
1003 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1004 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1005 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1007 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1008 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1009 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1010 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1011 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
1014 static void ath9k_hw_init_pll(struct ath_hw *ah,
1015 struct ath9k_channel *chan)
1019 if (AR_SREV_9100(ah)) {
1020 if (chan && IS_CHAN_5GHZ(chan))
1025 if (AR_SREV_9280_10_OR_LATER(ah)) {
1026 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1028 if (chan && IS_CHAN_HALF_RATE(chan))
1029 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1030 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1031 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1033 if (chan && IS_CHAN_5GHZ(chan)) {
1034 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1037 if (AR_SREV_9280_20(ah)) {
1038 if (((chan->channel % 20) == 0)
1039 || ((chan->channel % 10) == 0))
1045 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1048 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1050 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1052 if (chan && IS_CHAN_HALF_RATE(chan))
1053 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1054 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1055 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1057 if (chan && IS_CHAN_5GHZ(chan))
1058 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1060 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1062 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1064 if (chan && IS_CHAN_HALF_RATE(chan))
1065 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1066 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1067 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1069 if (chan && IS_CHAN_5GHZ(chan))
1070 pll |= SM(0xa, AR_RTC_PLL_DIV);
1072 pll |= SM(0xb, AR_RTC_PLL_DIV);
1075 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1077 udelay(RTC_PLL_SETTLE_DELAY);
1079 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
1082 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1084 int rx_chainmask, tx_chainmask;
1086 rx_chainmask = ah->rxchainmask;
1087 tx_chainmask = ah->txchainmask;
1089 switch (rx_chainmask) {
1091 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1092 AR_PHY_SWAP_ALT_CHAIN);
1094 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1095 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1096 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1102 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1103 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1109 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1110 if (tx_chainmask == 0x5) {
1111 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1112 AR_PHY_SWAP_ALT_CHAIN);
1114 if (AR_SREV_9100(ah))
1115 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1116 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1119 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1120 enum nl80211_iftype opmode)
1122 ah->mask_reg = AR_IMR_TXERR |
1128 if (ah->config.intr_mitigation)
1129 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1131 ah->mask_reg |= AR_IMR_RXOK;
1133 ah->mask_reg |= AR_IMR_TXOK;
1135 if (opmode == NL80211_IFTYPE_AP)
1136 ah->mask_reg |= AR_IMR_MIB;
1138 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1139 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1141 if (!AR_SREV_9100(ah)) {
1142 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1143 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1144 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1148 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1150 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1151 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1152 "bad ack timeout %u\n", us);
1153 ah->acktimeout = (u32) -1;
1156 REG_RMW_FIELD(ah, AR_TIME_OUT,
1157 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1158 ah->acktimeout = us;
1163 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1165 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1166 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1167 "bad cts timeout %u\n", us);
1168 ah->ctstimeout = (u32) -1;
1171 REG_RMW_FIELD(ah, AR_TIME_OUT,
1172 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1173 ah->ctstimeout = us;
1178 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1181 ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT,
1182 "bad global tx timeout %u\n", tu);
1183 ah->globaltxtimeout = (u32) -1;
1186 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1187 ah->globaltxtimeout = tu;
1192 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1194 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1197 if (ah->misc_mode != 0)
1198 REG_WRITE(ah, AR_PCU_MISC,
1199 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1200 if (ah->slottime != (u32) -1)
1201 ath9k_hw_setslottime(ah, ah->slottime);
1202 if (ah->acktimeout != (u32) -1)
1203 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1204 if (ah->ctstimeout != (u32) -1)
1205 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1206 if (ah->globaltxtimeout != (u32) -1)
1207 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1210 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1212 return vendorid == ATHEROS_VENDOR_ID ?
1213 ath9k_hw_devname(devid) : NULL;
1216 void ath9k_hw_detach(struct ath_hw *ah)
1218 if (!AR_SREV_9100(ah))
1219 ath9k_hw_ani_disable(ah);
1221 ath9k_hw_rf_free(ah);
1222 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1231 static void ath9k_hw_override_ini(struct ath_hw *ah,
1232 struct ath9k_channel *chan)
1236 if (AR_SREV_9271(ah)) {
1238 * Enable spectral scan as a workaround for stuck
1239 * beacons on AR9271 1.0. The beacon-stuck issue is not seen on
1242 if (AR_SREV_9271_10(ah)) {
1243 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1244 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1246 else if (AR_SREV_9271_11(ah))
1248 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1249 * present on AR9271 1.1
1251 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1256 * Set the RX_ABORT and RX_DIS bits and clear them only after
1257 * RXE is set for the MAC. This prevents frames with corrupted
1258 * descriptor status.
1260 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1262 if (AR_SREV_9280_10_OR_LATER(ah)) {
1263 val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1264 (~AR_PCU_MISC_MODE2_HWWAR1);
1266 if (AR_SREV_9287_10_OR_LATER(ah))
1267 val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1269 REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1272 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1273 AR_SREV_9280_10_OR_LATER(ah))
1276 * Disable BB clock gating
1277 * Necessary to avoid issues on AR5416 2.0
1279 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1282 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1283 struct ar5416_eeprom_def *pEepData,
1286 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1287 struct ath_common *common = ath9k_hw_common(ah);
1289 switch (ah->hw_version.devid) {
1290 case AR9280_DEVID_PCI:
1291 if (reg == 0x7894) {
1292 ath_print(common, ATH_DBG_EEPROM,
1293 "ini VAL: %x EEPROM: %x\n", value,
1294 (pBase->version & 0xff));
1296 if ((pBase->version & 0xff) > 0x0a) {
1297 ath_print(common, ATH_DBG_EEPROM,
1300 value &= ~AR_AN_TOP2_PWDCLKIND;
1301 value |= AR_AN_TOP2_PWDCLKIND &
1302 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1304 ath_print(common, ATH_DBG_EEPROM,
1305 "PWDCLKIND Earlier Rev\n");
1308 ath_print(common, ATH_DBG_EEPROM,
1309 "final ini VAL: %x\n", value);
1317 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1318 struct ar5416_eeprom_def *pEepData,
1321 if (ah->eep_map == EEP_MAP_4KBITS)
1324 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1327 static void ath9k_olc_init(struct ath_hw *ah)
1331 if (OLC_FOR_AR9287_10_LATER) {
1332 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1333 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1334 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1335 AR9287_AN_TXPC0_TXPCMODE,
1336 AR9287_AN_TXPC0_TXPCMODE_S,
1337 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1340 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1341 ah->originalGain[i] =
1342 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1348 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1349 struct ath9k_channel *chan)
1351 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1353 if (IS_CHAN_B(chan))
1355 else if (IS_CHAN_G(chan))
1363 static int ath9k_hw_process_ini(struct ath_hw *ah,
1364 struct ath9k_channel *chan)
1366 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1367 int i, regWrites = 0;
1368 struct ieee80211_channel *channel = chan->chan;
1369 u32 modesIndex, freqIndex;
1371 switch (chan->chanmode) {
1373 case CHANNEL_A_HT20:
1377 case CHANNEL_A_HT40PLUS:
1378 case CHANNEL_A_HT40MINUS:
1383 case CHANNEL_G_HT20:
1388 case CHANNEL_G_HT40PLUS:
1389 case CHANNEL_G_HT40MINUS:
1398 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1399 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1400 ah->eep_ops->set_addac(ah, chan);
1402 if (AR_SREV_5416_22_OR_LATER(ah)) {
1403 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1405 struct ar5416IniArray temp;
1407 sizeof(u32) * ah->iniAddac.ia_rows *
1408 ah->iniAddac.ia_columns;
1410 memcpy(ah->addac5416_21,
1411 ah->iniAddac.ia_array, addacSize);
1413 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1415 temp.ia_array = ah->addac5416_21;
1416 temp.ia_columns = ah->iniAddac.ia_columns;
1417 temp.ia_rows = ah->iniAddac.ia_rows;
1418 REG_WRITE_ARRAY(&temp, 1, regWrites);
1421 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1423 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1424 u32 reg = INI_RA(&ah->iniModes, i, 0);
1425 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1427 REG_WRITE(ah, reg, val);
1429 if (reg >= 0x7800 && reg < 0x78a0
1430 && ah->config.analog_shiftreg) {
1434 DO_DELAY(regWrites);
1437 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1438 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1440 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1441 AR_SREV_9287_10_OR_LATER(ah))
1442 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1444 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1445 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1446 u32 val = INI_RA(&ah->iniCommon, i, 1);
1448 REG_WRITE(ah, reg, val);
1450 if (reg >= 0x7800 && reg < 0x78a0
1451 && ah->config.analog_shiftreg) {
1455 DO_DELAY(regWrites);
1458 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1460 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1461 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1465 ath9k_hw_override_ini(ah, chan);
1466 ath9k_hw_set_regs(ah, chan);
1467 ath9k_hw_init_chain_masks(ah);
1469 if (OLC_FOR_AR9280_20_LATER)
1472 ah->eep_ops->set_txpower(ah, chan,
1473 ath9k_regd_get_ctl(regulatory, chan),
1474 channel->max_antenna_gain * 2,
1475 channel->max_power * 2,
1476 min((u32) MAX_RATE_POWER,
1477 (u32) regulatory->power_limit));
1479 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1480 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
1481 "ar5416SetRfRegs failed\n");
1488 /****************************************/
1489 /* Reset and Channel Switching Routines */
1490 /****************************************/
1492 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1499 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1500 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1502 if (!AR_SREV_9280_10_OR_LATER(ah))
1503 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1504 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1506 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1507 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1509 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1512 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1514 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1517 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1522 * set AHB_MODE not to do cacheline prefetches
1524 regval = REG_READ(ah, AR_AHB_MODE);
1525 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1528 * let mac dma reads be in 128 byte chunks
1530 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1531 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1534 * Restore TX Trigger Level to its pre-reset value.
1535 * The initial value depends on whether aggregation is enabled, and is
1536 * adjusted whenever underruns are detected.
1538 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1541 * let mac dma writes be in 128 byte chunks
1543 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1544 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1547 * Set up the receive FIFO threshold to hold off TX activities
1549 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1552 * reduce the number of usable entries in PCU TXBUF to avoid
1553 * wrap around issues.
1555 if (AR_SREV_9285(ah)) {
1556 /* For AR9285 the number of FIFOs is reduced by half,
1557 * so reduce the usable tx buffer size by half as well to
1558 * avoid data/delimiter underruns.
1560 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1561 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1562 } else if (!AR_SREV_9271(ah)) {
1563 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1564 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1568 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1572 val = REG_READ(ah, AR_STA_ID1);
1573 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1575 case NL80211_IFTYPE_AP:
1576 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1577 | AR_STA_ID1_KSRCH_MODE);
1578 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1580 case NL80211_IFTYPE_ADHOC:
1581 case NL80211_IFTYPE_MESH_POINT:
1582 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1583 | AR_STA_ID1_KSRCH_MODE);
1584 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1586 case NL80211_IFTYPE_STATION:
1587 case NL80211_IFTYPE_MONITOR:
1588 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1593 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1598 u32 coef_exp, coef_man;
1600 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1601 if ((coef_scaled >> coef_exp) & 0x1)
1604 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1606 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1608 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1609 *coef_exponent = coef_exp - 16;
1612 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1613 struct ath9k_channel *chan)
1615 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1616 u32 clockMhzScaled = 0x64000000;
1617 struct chan_centers centers;
1619 if (IS_CHAN_HALF_RATE(chan))
1620 clockMhzScaled = clockMhzScaled >> 1;
1621 else if (IS_CHAN_QUARTER_RATE(chan))
1622 clockMhzScaled = clockMhzScaled >> 2;
1624 ath9k_hw_get_channel_centers(ah, chan, &centers);
1625 coef_scaled = clockMhzScaled / centers.synth_center;
1627 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1630 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1631 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1632 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1633 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1635 coef_scaled = (9 * coef_scaled) / 10;
1637 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1640 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1641 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1642 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1643 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
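/*
 * Worked example (illustrative only, assuming COEF_SCALE_S is 24): for
 * a full-rate channel centered at 2412 MHz, coef_scaled =
 * 0x64000000 / 2412 = 695572. The highest set bit is bit 19, so
 * coef_exp = 14 - (19 - 24) = 19, coef_man = 695572 + (1 << 4) = 695588,
 * giving a mantissa of 695588 >> 5 = 21737 and an exponent of
 * 19 - 16 = 3 for AR_PHY_TIMING3.
 */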
1646 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1651 if (AR_SREV_9100(ah)) {
1652 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1653 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1654 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1655 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1656 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1659 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1660 AR_RTC_FORCE_WAKE_ON_INT);
1662 if (AR_SREV_9100(ah)) {
1663 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1664 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1666 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1668 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1669 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1670 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1671 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1673 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1676 rst_flags = AR_RTC_RC_MAC_WARM;
1677 if (type == ATH9K_RESET_COLD)
1678 rst_flags |= AR_RTC_RC_MAC_COLD;
1681 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1684 REG_WRITE(ah, AR_RTC_RC, 0);
1685 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1686 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1687 "RTC stuck in MAC reset\n");
1691 if (!AR_SREV_9100(ah))
1692 REG_WRITE(ah, AR_RC, 0);
1694 if (AR_SREV_9100(ah))
1700 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1702 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1703 AR_RTC_FORCE_WAKE_ON_INT);
1705 if (!AR_SREV_9100(ah))
1706 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1708 REG_WRITE(ah, AR_RTC_RESET, 0);
1711 if (!AR_SREV_9100(ah))
1712 REG_WRITE(ah, AR_RC, 0);
1714 REG_WRITE(ah, AR_RTC_RESET, 1);
1716 if (!ath9k_hw_wait(ah,
1721 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1722 "RTC not waking up\n");
1726 ath9k_hw_read_revisions(ah);
1728 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1731 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1733 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1734 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1737 case ATH9K_RESET_POWER_ON:
1738 return ath9k_hw_set_reset_power_on(ah);
1739 case ATH9K_RESET_WARM:
1740 case ATH9K_RESET_COLD:
1741 return ath9k_hw_set_reset(ah, type);
1747 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan)
1750 u32 enableDacFifo = 0;
1752 if (AR_SREV_9285_10_OR_LATER(ah))
1753 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1754 AR_PHY_FC_ENABLE_DAC_FIFO);
1756 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1757 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1759 if (IS_CHAN_HT40(chan)) {
1760 phymode |= AR_PHY_FC_DYN2040_EN;
1762 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1763 (chan->chanmode == CHANNEL_G_HT40PLUS))
1764 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1767 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1769 ath9k_hw_set11nmac2040(ah);
1771 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1772 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1775 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1776 struct ath9k_channel *chan)
1778 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1779 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1781 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1784 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1787 ah->chip_fullsleep = false;
1788 ath9k_hw_init_pll(ah, chan);
1789 ath9k_hw_set_rfmode(ah, chan);
1794 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1795 struct ath9k_channel *chan)
1797 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1798 struct ath_common *common = ath9k_hw_common(ah);
1799 struct ieee80211_channel *channel = chan->chan;
1800 u32 synthDelay, qnum;
1802 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1803 if (ath9k_hw_numtxpending(ah, qnum)) {
1804 ath_print(common, ATH_DBG_QUEUE,
1805 "Transmit frames pending on "
1806 "queue %d\n", qnum);
1811 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1812 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1813 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1814 ath_print(common, ATH_DBG_FATAL,
1815 "Could not kill baseband RX\n");
1819 ath9k_hw_set_regs(ah, chan);
1821 if (AR_SREV_9280_10_OR_LATER(ah)) {
1822 ath9k_hw_ar9280_set_channel(ah, chan);
1824 if (!(ath9k_hw_set_channel(ah, chan))) {
1825 ath_print(common, ATH_DBG_FATAL,
1826 "Failed to set channel\n");
1831 ah->eep_ops->set_txpower(ah, chan,
1832 ath9k_regd_get_ctl(regulatory, chan),
1833 channel->max_antenna_gain * 2,
1834 channel->max_power * 2,
1835 min((u32) MAX_RATE_POWER,
1836 (u32) regulatory->power_limit));
1838 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1839 if (IS_CHAN_B(chan))
1840 synthDelay = (4 * synthDelay) / 22;
1844 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1846 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1848 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1849 ath9k_hw_set_delta_slope(ah, chan);
1851 if (AR_SREV_9280_10_OR_LATER(ah))
1852 ath9k_hw_9280_spur_mitigate(ah, chan);
1854 ath9k_hw_spur_mitigate(ah, chan);
1856 if (!chan->oneTimeCalsDone)
1857 chan->oneTimeCalsDone = true;
1862 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1864 int bb_spur = AR_NO_SPUR;
1867 int bb_spur_off, spur_subchannel_sd;
1869 int spur_delta_phase;
1871 int upper, lower, cur_vit_mask;
1874 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1875 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1877 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1878 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1880 int inc[4] = { 0, 100, 0, 0 };
1881 struct chan_centers centers;
1888 bool is2GHz = IS_CHAN_2GHZ(chan);
1890 memset(&mask_m, 0, sizeof(int8_t) * 123);
1891 memset(&mask_p, 0, sizeof(int8_t) * 123);
1893 ath9k_hw_get_channel_centers(ah, chan, &centers);
1894 freq = centers.synth_center;
1896 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1897 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1898 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1901 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1903 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1905 if (AR_NO_SPUR == cur_bb_spur)
1907 cur_bb_spur = cur_bb_spur - freq;
1909 if (IS_CHAN_HT40(chan)) {
1910 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1911 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1912 bb_spur = cur_bb_spur;
1915 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1916 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1917 bb_spur = cur_bb_spur;
1922 if (AR_NO_SPUR == bb_spur) {
1923 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1924 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1927 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1928 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1931 bin = bb_spur * 320;
1933 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1935 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1936 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1937 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1938 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1939 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1941 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1942 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1943 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1944 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1945 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1946 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1948 if (IS_CHAN_HT40(chan)) {
1950 spur_subchannel_sd = 1;
1951 bb_spur_off = bb_spur + 10;
1953 spur_subchannel_sd = 0;
1954 bb_spur_off = bb_spur - 10;
1957 spur_subchannel_sd = 0;
1958 bb_spur_off = bb_spur;
1961 if (IS_CHAN_HT40(chan))
1963 ((bb_spur * 262144) /
1964 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1967 ((bb_spur * 524288) /
1968 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1970 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1971 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1973 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1974 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1975 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1976 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1978 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1979 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
1985 for (i = 0; i < 4; i++) {
1989 for (bp = 0; bp < 30; bp++) {
1990 if ((cur_bin > lower) && (cur_bin < upper)) {
1991 pilot_mask = pilot_mask | 0x1 << bp;
1992 chan_mask = chan_mask | 0x1 << bp;
1997 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
1998 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2001 cur_vit_mask = 6100;
2005 for (i = 0; i < 123; i++) {
2006 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2008 /* workaround for gcc bug #37014 */
2009 volatile int tmp_v = abs(cur_vit_mask - bin);
2015 if (cur_vit_mask < 0)
2016 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2018 mask_p[cur_vit_mask / 100] = mask_amt;
2020 cur_vit_mask -= 100;
2023 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2024 | (mask_m[48] << 26) | (mask_m[49] << 24)
2025 | (mask_m[50] << 22) | (mask_m[51] << 20)
2026 | (mask_m[52] << 18) | (mask_m[53] << 16)
2027 | (mask_m[54] << 14) | (mask_m[55] << 12)
2028 | (mask_m[56] << 10) | (mask_m[57] << 8)
2029 | (mask_m[58] << 6) | (mask_m[59] << 4)
2030 | (mask_m[60] << 2) | (mask_m[61] << 0);
2031 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2032 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2034 tmp_mask = (mask_m[31] << 28)
2035 | (mask_m[32] << 26) | (mask_m[33] << 24)
2036 | (mask_m[34] << 22) | (mask_m[35] << 20)
2037 | (mask_m[36] << 18) | (mask_m[37] << 16)
2038 | (mask_m[48] << 14) | (mask_m[39] << 12)
2039 | (mask_m[40] << 10) | (mask_m[41] << 8)
2040 | (mask_m[42] << 6) | (mask_m[43] << 4)
2041 | (mask_m[44] << 2) | (mask_m[45] << 0);
2042 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2043 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2045 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2046 | (mask_m[18] << 26) | (mask_m[18] << 24)
2047 | (mask_m[20] << 22) | (mask_m[20] << 20)
2048 | (mask_m[22] << 18) | (mask_m[22] << 16)
2049 | (mask_m[24] << 14) | (mask_m[24] << 12)
2050 | (mask_m[25] << 10) | (mask_m[26] << 8)
2051 | (mask_m[27] << 6) | (mask_m[28] << 4)
2052 | (mask_m[29] << 2) | (mask_m[30] << 0);
2053 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2054 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2056 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2057 | (mask_m[2] << 26) | (mask_m[3] << 24)
2058 | (mask_m[4] << 22) | (mask_m[5] << 20)
2059 | (mask_m[6] << 18) | (mask_m[7] << 16)
2060 | (mask_m[8] << 14) | (mask_m[9] << 12)
2061 | (mask_m[10] << 10) | (mask_m[11] << 8)
2062 | (mask_m[12] << 6) | (mask_m[13] << 4)
2063 | (mask_m[14] << 2) | (mask_m[15] << 0);
2064 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2065 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2067 tmp_mask = (mask_p[15] << 28)
2068 | (mask_p[14] << 26) | (mask_p[13] << 24)
2069 | (mask_p[12] << 22) | (mask_p[11] << 20)
2070 | (mask_p[10] << 18) | (mask_p[9] << 16)
2071 | (mask_p[8] << 14) | (mask_p[7] << 12)
2072 | (mask_p[6] << 10) | (mask_p[5] << 8)
2073 | (mask_p[4] << 6) | (mask_p[3] << 4)
2074 | (mask_p[2] << 2) | (mask_p[1] << 0);
2075 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2076 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2078 tmp_mask = (mask_p[30] << 28)
2079 | (mask_p[29] << 26) | (mask_p[28] << 24)
2080 | (mask_p[27] << 22) | (mask_p[26] << 20)
2081 | (mask_p[25] << 18) | (mask_p[24] << 16)
2082 | (mask_p[23] << 14) | (mask_p[22] << 12)
2083 | (mask_p[21] << 10) | (mask_p[20] << 8)
2084 | (mask_p[19] << 6) | (mask_p[18] << 4)
2085 | (mask_p[17] << 2) | (mask_p[16] << 0);
2086 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2087 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2089 tmp_mask = (mask_p[45] << 28)
2090 | (mask_p[44] << 26) | (mask_p[43] << 24)
2091 | (mask_p[42] << 22) | (mask_p[41] << 20)
2092 | (mask_p[40] << 18) | (mask_p[39] << 16)
2093 | (mask_p[38] << 14) | (mask_p[37] << 12)
2094 | (mask_p[36] << 10) | (mask_p[35] << 8)
2095 | (mask_p[34] << 6) | (mask_p[33] << 4)
2096 | (mask_p[32] << 2) | (mask_p[31] << 0);
2097 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2098 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2100 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2101 | (mask_p[59] << 26) | (mask_p[58] << 24)
2102 | (mask_p[57] << 22) | (mask_p[56] << 20)
2103 | (mask_p[55] << 18) | (mask_p[54] << 16)
2104 | (mask_p[53] << 14) | (mask_p[52] << 12)
2105 | (mask_p[51] << 10) | (mask_p[50] << 8)
2106 | (mask_p[49] << 6) | (mask_p[48] << 4)
2107 | (mask_p[47] << 2) | (mask_p[46] << 0);
2108 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2109 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2112 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2114 int bb_spur = AR_NO_SPUR;
2117 int spur_delta_phase;
2119 int upper, lower, cur_vit_mask;
2122 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2123 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2125 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2126 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2128 int inc[4] = { 0, 100, 0, 0 };
2135 bool is2GHz = IS_CHAN_2GHZ(chan);
2137 memset(&mask_m, 0, sizeof(int8_t) * 123);
2138 memset(&mask_p, 0, sizeof(int8_t) * 123);
2140 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2141 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2142 if (AR_NO_SPUR == cur_bb_spur)
2144 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2145 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2146 bb_spur = cur_bb_spur;
2151 if (AR_NO_SPUR == bb_spur)
2156 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2157 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2158 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2159 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2160 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2162 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2164 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2165 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2166 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2167 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2168 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2169 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2171 spur_delta_phase = ((bb_spur * 524288) / 100) &
2172 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2174 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2175 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2177 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2178 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2179 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2180 REG_WRITE(ah, AR_PHY_TIMING11, new);
2186 for (i = 0; i < 4; i++) {
2190 for (bp = 0; bp < 30; bp++) {
2191 if ((cur_bin > lower) && (cur_bin < upper)) {
2192 pilot_mask = pilot_mask | 0x1 << bp;
2193 chan_mask = chan_mask | 0x1 << bp;
2198 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2199 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2202 cur_vit_mask = 6100;
2206 for (i = 0; i < 123; i++) {
2207 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2209 /* workaround for gcc bug #37014 */
2210 volatile int tmp_v = abs(cur_vit_mask - bin);
2216 if (cur_vit_mask < 0)
2217 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2219 mask_p[cur_vit_mask / 100] = mask_amt;
2221 cur_vit_mask -= 100;
2224 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2225 | (mask_m[48] << 26) | (mask_m[49] << 24)
2226 | (mask_m[50] << 22) | (mask_m[51] << 20)
2227 | (mask_m[52] << 18) | (mask_m[53] << 16)
2228 | (mask_m[54] << 14) | (mask_m[55] << 12)
2229 | (mask_m[56] << 10) | (mask_m[57] << 8)
2230 | (mask_m[58] << 6) | (mask_m[59] << 4)
2231 | (mask_m[60] << 2) | (mask_m[61] << 0);
2232 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2233 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2235 tmp_mask = (mask_m[31] << 28)
2236 | (mask_m[32] << 26) | (mask_m[33] << 24)
2237 | (mask_m[34] << 22) | (mask_m[35] << 20)
2238 | (mask_m[36] << 18) | (mask_m[37] << 16)
2239 | (mask_m[38] << 14) | (mask_m[39] << 12)
2240 | (mask_m[40] << 10) | (mask_m[41] << 8)
2241 | (mask_m[42] << 6) | (mask_m[43] << 4)
2242 | (mask_m[44] << 2) | (mask_m[45] << 0);
2243 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2244 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2246 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2247 | (mask_m[18] << 26) | (mask_m[18] << 24)
2248 | (mask_m[20] << 22) | (mask_m[20] << 20)
2249 | (mask_m[22] << 18) | (mask_m[22] << 16)
2250 | (mask_m[24] << 14) | (mask_m[24] << 12)
2251 | (mask_m[25] << 10) | (mask_m[26] << 8)
2252 | (mask_m[27] << 6) | (mask_m[28] << 4)
2253 | (mask_m[29] << 2) | (mask_m[30] << 0);
2254 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2255 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2257 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2258 | (mask_m[2] << 26) | (mask_m[3] << 24)
2259 | (mask_m[4] << 22) | (mask_m[5] << 20)
2260 | (mask_m[6] << 18) | (mask_m[7] << 16)
2261 | (mask_m[8] << 14) | (mask_m[9] << 12)
2262 | (mask_m[10] << 10) | (mask_m[11] << 8)
2263 | (mask_m[12] << 6) | (mask_m[13] << 4)
2264 | (mask_m[14] << 2) | (mask_m[15] << 0);
2265 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2266 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2268 tmp_mask = (mask_p[15] << 28)
2269 | (mask_p[14] << 26) | (mask_p[13] << 24)
2270 | (mask_p[12] << 22) | (mask_p[11] << 20)
2271 | (mask_p[10] << 18) | (mask_p[9] << 16)
2272 | (mask_p[8] << 14) | (mask_p[7] << 12)
2273 | (mask_p[6] << 10) | (mask_p[5] << 8)
2274 | (mask_p[4] << 6) | (mask_p[3] << 4)
2275 | (mask_p[2] << 2) | (mask_p[1] << 0);
2276 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2277 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2279 tmp_mask = (mask_p[30] << 28)
2280 | (mask_p[29] << 26) | (mask_p[28] << 24)
2281 | (mask_p[27] << 22) | (mask_p[26] << 20)
2282 | (mask_p[25] << 18) | (mask_p[24] << 16)
2283 | (mask_p[23] << 14) | (mask_p[22] << 12)
2284 | (mask_p[21] << 10) | (mask_p[20] << 8)
2285 | (mask_p[19] << 6) | (mask_p[18] << 4)
2286 | (mask_p[17] << 2) | (mask_p[16] << 0);
2287 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2288 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2290 tmp_mask = (mask_p[45] << 28)
2291 | (mask_p[44] << 26) | (mask_p[43] << 24)
2292 | (mask_p[42] << 22) | (mask_p[41] << 20)
2293 | (mask_p[40] << 18) | (mask_p[39] << 16)
2294 | (mask_p[38] << 14) | (mask_p[37] << 12)
2295 | (mask_p[36] << 10) | (mask_p[35] << 8)
2296 | (mask_p[34] << 6) | (mask_p[33] << 4)
2297 | (mask_p[32] << 2) | (mask_p[31] << 0);
2298 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2299 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2301 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2302 | (mask_p[59] << 26) | (mask_p[58] << 24)
2303 | (mask_p[57] << 22) | (mask_p[56] << 20)
2304 | (mask_p[55] << 18) | (mask_p[54] << 16)
2305 | (mask_p[53] << 14) | (mask_p[52] << 12)
2306 | (mask_p[51] << 10) | (mask_p[50] << 8)
2307 | (mask_p[49] << 6) | (mask_p[48] << 4)
2308 | (mask_p[47] << 2) | (mask_p[46] << 0);
2309 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2310 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
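/*
 * The unrolled shift/OR sequences in the two spur mitigation routines
 * above pack one mask value per 2-bit slot of a 32-bit word, with the
 * lowest array index in the highest slot (e.g. mask_m[46] lands in bits
 * 31:30 and mask_m[61] in bits 1:0 of AR_PHY_BIN_MASK_1).  The helper
 * below is a minimal sketch of that ascending-index case, shown purely
 * to document the packing; it is hypothetical, assumes each mask value
 * fits in two bits, and is not used by the driver.
 */
static inline u32 ath9k_hw_pack_vit_mask(const int8_t *mask, int first,
					 int count)
{
	u32 word = 0;
	int i;

	/* mask[first] lands in the top-most 2-bit slot, later indices
	 * move toward bit 0. */
	for (i = 0; i < count; i++)
		word |= (u32)(mask[first + i] & 0x3) <<
			(2 * (count - 1 - i));

	return word;
}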
2313 static void ath9k_enable_rfkill(struct ath_hw *ah)
2315 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2316 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2318 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2319 AR_GPIO_INPUT_MUX2_RFSILENT);
2321 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2322 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2325 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2326 bool bChannelChange)
2328 struct ath_common *common = ath9k_hw_common(ah);
2330 struct ath9k_channel *curchan = ah->curchan;
2334 int i, rx_chainmask, r;
2336 ah->txchainmask = common->tx_chainmask;
2337 ah->rxchainmask = common->rx_chainmask;
2339 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
return -EIO;
2342 if (curchan && !ah->chip_fullsleep)
2343 ath9k_hw_getnf(ah, curchan);
2345 if (bChannelChange &&
2346 !ah->chip_fullsleep &&
2347 ah->curchan &&
2348 (chan->channel != ah->curchan->channel) &&
2349 ((chan->channelFlags & CHANNEL_ALL) ==
2350 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2351 !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2352 IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2354 if (ath9k_hw_channel_change(ah, chan)) {
2355 ath9k_hw_loadnf(ah, ah->curchan);
2356 ath9k_hw_start_nfcal(ah);
return 0;
}
}
2361 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2362 if (saveDefAntenna == 0)
saveDefAntenna = 1;
2365 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2367 /* For chips on which RTC reset is done, save TSF before it gets cleared */
2368 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2369 tsf = ath9k_hw_gettsf64(ah);
2371 saveLedState = REG_READ(ah, AR_CFG_LED) &
2372 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2373 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2375 ath9k_hw_mark_phy_inactive(ah);
2377 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
REG_WRITE(ah,
2379 AR9271_RESET_POWER_DOWN_CONTROL,
2380 AR9271_RADIO_RF_RST);
2384 if (!ath9k_hw_chip_reset(ah, chan)) {
2385 ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n");
return -EINVAL;
}
2389 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2390 ah->htc_reset_init = false;
REG_WRITE(ah,
2392 AR9271_RESET_POWER_DOWN_CONTROL,
2393 AR9271_GATE_MAC_CTL);
2398 if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2399 ath9k_hw_settsf64(ah, tsf);
2401 if (AR_SREV_9280_10_OR_LATER(ah))
2402 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2404 if (AR_SREV_9287_12_OR_LATER(ah)) {
2405 /* Enable ASYNC FIFO */
2406 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2407 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2408 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2409 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2410 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2411 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2412 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2414 r = ath9k_hw_process_ini(ah, chan);
if (r)
return r;
2418 /* Setup MFP options for CCMP */
2419 if (AR_SREV_9280_20_OR_LATER(ah)) {
2420 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2421 * frames when constructing CCMP AAD. */
2422 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
0xc7ff);
2424 ah->sw_mgmt_crypto = false;
2425 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2426 /* Disable hardware crypto for management frames */
2427 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2428 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2429 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2430 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2431 ah->sw_mgmt_crypto = true;
2433 ah->sw_mgmt_crypto = true;
2435 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2436 ath9k_hw_set_delta_slope(ah, chan);
2438 if (AR_SREV_9280_10_OR_LATER(ah))
2439 ath9k_hw_9280_spur_mitigate(ah, chan);
else
2441 ath9k_hw_spur_mitigate(ah, chan);
2443 ah->eep_ops->set_board_values(ah, chan);
2445 ath9k_hw_decrease_chain_power(ah, chan);
2447 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
2448 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
| macStaId1
2450 | AR_STA_ID1_RTS_USE_DEF
| (ah->config.ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2453 | ah->sta_id1_defaults);
2454 ath9k_hw_set_operating_mode(ah, ah->opmode);
2456 ath_hw_setbssidmask(common);
2458 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2460 ath9k_hw_write_associd(ah);
2462 REG_WRITE(ah, AR_ISR, ~0);
2464 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2466 if (AR_SREV_9280_10_OR_LATER(ah))
2467 ath9k_hw_ar9280_set_channel(ah, chan);
else
2469 if (!(ath9k_hw_set_channel(ah, chan)))
return -EIO;
2472 for (i = 0; i < AR_NUM_DCU; i++)
2473 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
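	/*
	 * The loop above programs a one-hot DCU->QCU mapping: DMA control
	 * unit i serves transmit queue i and nothing else.
	 */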
2476 for (i = 0; i < ah->caps.total_queues; i++)
2477 ath9k_hw_resettxqueue(ah, i);
2479 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2480 ath9k_hw_init_qos(ah);
2482 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2483 ath9k_enable_rfkill(ah);
2485 ath9k_hw_init_user_settings(ah);
2487 if (AR_SREV_9287_12_OR_LATER(ah)) {
2488 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2489 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2490 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2491 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2492 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2493 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2495 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2496 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2498 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2499 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2500 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2501 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2503 if (AR_SREV_9287_12_OR_LATER(ah)) {
2504 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2505 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2508 REG_WRITE(ah, AR_STA_ID1,
2509 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2511 ath9k_hw_set_dma(ah);
2513 REG_WRITE(ah, AR_OBS, 8);
2515 if (ah->config.intr_mitigation) {
2516 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2517 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2520 ath9k_hw_init_bb(ah, chan);
2522 if (!ath9k_hw_init_cal(ah, chan))
return -EIO;
2525 rx_chainmask = ah->rxchainmask;
2526 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2527 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2528 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2531 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2534 * For big-endian systems, turn on swapping for descriptors
2536 if (AR_SREV_9100(ah)) {
2538 mask = REG_READ(ah, AR_CFG);
2539 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2540 ath_print(common, ATH_DBG_RESET,
2541 "CFG Byte Swap Set 0x%x\n", mask);
} else {
2544 mask = INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2545 REG_WRITE(ah, AR_CFG, mask);
2546 ath_print(common, ATH_DBG_RESET,
2547 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2550 /* Configure AR9271 target WLAN */
2551 if (AR_SREV_9271(ah))
2552 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
else
2555 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2559 if (ah->btcoex_hw.enabled)
2560 ath9k_hw_btcoex_enable(ah);
2565 /************************/
2566 /* Key Cache Management */
2567 /************************/
2569 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2573 if (entry >= ah->caps.keycache_size) {
2574 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2575 "keychache entry %u out of range\n", entry);
2579 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2581 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2582 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2583 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2584 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2585 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2586 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2587 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2588 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2590 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2591 u16 micentry = entry + 64;
2593 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2594 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2595 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2596 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2603 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2607 if (entry >= ah->caps.keycache_size) {
2608 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2609 "keychache entry %u out of range\n", entry);
2614 macHi = (mac[5] << 8) | mac[4];
2615 macLo = (mac[3] << 24) |
(mac[2] << 16) |
(mac[1] << 8) |
mac[0];
macLo >>= 1;
2620 macLo |= (macHi & 1) << 31;
macHi >>= 1;
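	/*
	 * Note (sketch): the key cache stores the address shifted right by
	 * one bit, so AR_KEYTABLE_MAC0/MAC1 end up holding the upper 47 bits
	 * of the MAC, with AR_KEYTABLE_VALID set in MAC1.  The dropped low
	 * bit is the individual/group bit, which is zero for the unicast
	 * addresses matched here.
	 */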
2625 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2626 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2631 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2632 const struct ath9k_keyval *k,
2635 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2636 struct ath_common *common = ath9k_hw_common(ah);
2637 u32 key0, key1, key2, key3, key4;
2640 if (entry >= pCap->keycache_size) {
2641 ath_print(common, ATH_DBG_FATAL,
2642 "keycache entry %u out of range\n", entry);
2646 switch (k->kv_type) {
2647 case ATH9K_CIPHER_AES_OCB:
2648 keyType = AR_KEYTABLE_TYPE_AES;
break;
2650 case ATH9K_CIPHER_AES_CCM:
2651 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2652 ath_print(common, ATH_DBG_ANY,
2653 "AES-CCM not supported by mac rev 0x%x\n",
2654 ah->hw_version.macRev);
return false;
}
2657 keyType = AR_KEYTABLE_TYPE_CCM;
break;
2659 case ATH9K_CIPHER_TKIP:
2660 keyType = AR_KEYTABLE_TYPE_TKIP;
2661 if (ATH9K_IS_MIC_ENABLED(ah)
2662 && entry + 64 >= pCap->keycache_size) {
2663 ath_print(common, ATH_DBG_ANY,
2664 "entry %u inappropriate for TKIP\n", entry);
2668 case ATH9K_CIPHER_WEP:
2669 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2670 ath_print(common, ATH_DBG_ANY,
2671 "WEP key length %u too small\n", k->kv_len);
2674 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2675 keyType = AR_KEYTABLE_TYPE_40;
2676 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2677 keyType = AR_KEYTABLE_TYPE_104;
else
2679 keyType = AR_KEYTABLE_TYPE_128;
break;
2681 case ATH9K_CIPHER_CLR:
2682 keyType = AR_KEYTABLE_TYPE_CLR;
break;
default:
2685 ath_print(common, ATH_DBG_FATAL,
2686 "cipher %u not supported\n", k->kv_type);
2690 key0 = get_unaligned_le32(k->kv_val + 0);
2691 key1 = get_unaligned_le16(k->kv_val + 4);
2692 key2 = get_unaligned_le32(k->kv_val + 6);
2693 key3 = get_unaligned_le16(k->kv_val + 10);
2694 key4 = get_unaligned_le32(k->kv_val + 12);
2695 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
key4 &= 0xff;
2699 * Note: Key cache registers access special memory area that requires
2700 * two 32-bit writes to actually update the values in the internal
2701 * memory. Consequently, the exact order and pairs used here must be
* left as is.
*/
2705 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2706 u16 micentry = entry + 64;
2709 * Write inverted key[47:0] first to avoid Michael MIC errors
2710 * on frames that could be sent or received at the same time.
2711 * The correct key will be written in the end once everything
* else is ready.
*/
2714 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2715 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2717 /* Write key[95:48] */
2718 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2719 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2721 /* Write key[127:96] and key type */
2722 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2723 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2725 /* Write MAC address for the entry */
2726 (void) ath9k_hw_keysetmac(ah, entry, mac);
2728 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2730 * TKIP uses two key cache entries:
2731 * Michael MIC TX/RX keys in the same key cache entry
2732 * (idx = main index + 64):
2733 * key0 [31:0] = RX key [31:0]
2734 * key1 [15:0] = TX key [31:16]
2735 * key1 [31:16] = reserved
2736 * key2 [31:0] = RX key [63:32]
2737 * key3 [15:0] = TX key [15:0]
2738 * key3 [31:16] = reserved
2739 * key4 [31:0] = TX key [63:32]
2741 u32 mic0, mic1, mic2, mic3, mic4;
2743 mic0 = get_unaligned_le32(k->kv_mic + 0);
2744 mic2 = get_unaligned_le32(k->kv_mic + 4);
2745 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2746 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2747 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2749 /* Write RX[31:0] and TX[31:16] */
2750 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2751 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2753 /* Write RX[63:32] and TX[15:0] */
2754 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2755 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2757 /* Write TX[63:32] and keyType(reserved) */
2758 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2759 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2760 AR_KEYTABLE_TYPE_CLR);
2764 * TKIP uses four key cache entries (two for group
* keys):
2766 * Michael MIC TX/RX keys are in different key cache
2767 * entries (idx = main index + 64 for TX and
2768 * main index + 32 + 96 for RX):
2769 * key0 [31:0] = TX/RX MIC key [31:0]
2770 * key1 [31:0] = reserved
2771 * key2 [31:0] = TX/RX MIC key [63:32]
2772 * key3 [31:0] = reserved
2773 * key4 [31:0] = reserved
2775 * Upper layer code will call this function separately
2776 * for TX and RX keys when these register offsets are
* used.
*/
u32 mic0, mic2;
2781 mic0 = get_unaligned_le32(k->kv_mic + 0);
2782 mic2 = get_unaligned_le32(k->kv_mic + 4);
2784 /* Write MIC key[31:0] */
2785 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2786 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2788 /* Write MIC key[63:32] */
2789 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2790 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2792 /* Write TX[63:32] and keyType(reserved) */
2793 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2794 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2795 AR_KEYTABLE_TYPE_CLR);
2798 /* MAC address registers are reserved for the MIC entry */
2799 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2800 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2803 * Write the correct (un-inverted) key[47:0] last to enable
2804 * TKIP now that all other registers are set with correct
* values.
*/
2807 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2808 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2810 /* Write key[47:0] */
2811 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2812 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2814 /* Write key[95:48] */
2815 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2816 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2818 /* Write key[127:96] and key type */
2819 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2820 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2822 /* Write MAC address for the entry */
2823 (void) ath9k_hw_keysetmac(ah, entry, mac);
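/*
 * Illustrative caller sketch (hypothetical, not part of this file):
 * program a 104-bit WEP key into key cache entry 4 without a per-station
 * MAC match.  It assumes kv_val is the in-struct key material buffer that
 * the get_unaligned_le*() reads above operate on.
 */
#if 0
static void example_install_wep_key(struct ath_hw *ah, const u8 *wep_key)
{
	struct ath9k_keyval kv;

	memset(&kv, 0, sizeof(kv));
	kv.kv_type = ATH9K_CIPHER_WEP;
	kv.kv_len = WLAN_KEY_LEN_WEP104;
	memcpy(kv.kv_val, wep_key, WLAN_KEY_LEN_WEP104);

	/* NULL mac: no per-station address is written for the entry */
	ath9k_hw_set_keycache_entry(ah, 4, &kv, NULL);
}
#endif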
2829 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2831 if (entry < ah->caps.keycache_size) {
2832 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2833 if (val & AR_KEYTABLE_VALID)
2839 /******************************/
2840 /* Power Management (Chipset) */
2841 /******************************/
2843 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2845 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2847 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2848 AR_RTC_FORCE_WAKE_EN);
2849 if (!AR_SREV_9100(ah))
2850 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2852 if (!AR_SREV_5416(ah))
2853 REG_CLR_BIT(ah, AR_RTC_RESET,
AR_RTC_RESET_EN);
2858 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2860 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2862 struct ath9k_hw_capabilities *pCap = &ah->caps;
2864 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2865 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2866 AR_RTC_FORCE_WAKE_ON_INT);
2868 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2869 AR_RTC_FORCE_WAKE_EN);
2874 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2880 if ((REG_READ(ah, AR_RTC_STATUS) &
2881 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2882 if (!ath9k_hw_set_reset_reg(ah,
2883 ATH9K_RESET_POWER_ON))
return false;
2886 ath9k_hw_init_pll(ah, NULL);
2888 if (AR_SREV_9100(ah))
2889 REG_SET_BIT(ah, AR_RTC_RESET,
AR_RTC_RESET_EN);
else
2892 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2893 AR_RTC_FORCE_WAKE_EN);
2896 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2897 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2898 if (val == AR_RTC_STATUS_ON)
break;
udelay(50);
2901 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2902 AR_RTC_FORCE_WAKE_EN);
if (i == 0) {
2905 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2906 "Failed to wake up in %uus\n",
2907 POWER_UP_TIME / 20);
return false;
}
2912 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2917 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2919 struct ath_common *common = ath9k_hw_common(ah);
2920 int status = true, setChip = true;
2921 static const char *modes[] = {
2928 if (ah->power_mode == mode)
return status;
2931 ath_print(common, ATH_DBG_RESET, "%s -> %s\n",
2932 modes[ah->power_mode], modes[mode]);
switch (mode) {
2935 case ATH9K_PM_AWAKE:
2936 status = ath9k_hw_set_power_awake(ah, setChip);
break;
2938 case ATH9K_PM_FULL_SLEEP:
2939 ath9k_set_power_sleep(ah, setChip);
2940 ah->chip_fullsleep = true;
break;
2942 case ATH9K_PM_NETWORK_SLEEP:
2943 ath9k_set_power_network_sleep(ah, setChip);
break;
default:
2946 ath_print(common, ATH_DBG_FATAL,
2947 "Unknown power mode %u\n", mode);
2950 ah->power_mode = mode;

return status;
}
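/*
 * Typical call pattern (sketch; the real callers live elsewhere in the
 * driver, e.g. the ATH9K_PM_AWAKE call near the top of ath9k_hw_reset()):
 *
 *	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
 *		return -EIO;
 *	... program hardware registers ...
 *	ath9k_hw_setpower(ah, ATH9K_PM_NETWORK_SLEEP);
 */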
2956 * Helper for ASPM support.
2958 * Disable PLL when in L0s as well as receiver clock when in L1.
2959 * This power saving option must be enabled through the SerDes.
2961 * Programming the SerDes must go through the same 288 bit serial shift
2962 * register as the other analog registers. Hence the 9 writes.
2964 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
2969 if (!ah->is_pciexpress)
return;
2972 /* Do not touch SerDes registers */
2973 if (ah->config.pcie_powersave_enable == 2)
return;
2976 /* Nothing to do on restore for 11N */
if (!restore) {
2978 if (AR_SREV_9280_20_OR_LATER(ah)) {
2980 * AR9280 2.0 or later chips use SerDes values from the
2981 * initvals.h initialized depending on chipset during
2984 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
2985 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
2986 INI_RA(&ah->iniPcieSerdes, i, 1));
2988 } else if (AR_SREV_9280(ah) &&
2989 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
2990 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
2991 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
2993 /* RX shut off when elecidle is asserted */
2994 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
2995 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
2996 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
2998 /* Shut off CLKREQ active in L1 */
2999 if (ah->config.pcie_clock_req)
3000 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
else
3002 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3004 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3005 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3006 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3008 /* Load the new settings */
3009 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
} else {
3012 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3013 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3015 /* RX shut off when elecidle is asserted */
3016 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3017 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3018 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3021 * Ignore ah->config.pcie_clock_req setting for
* pre-AR9280 chips.
*/
3024 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3026 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3027 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3028 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3030 /* Load the new settings */
3031 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3036 /* set bit 19 to allow forcing of pcie core into L1 state */
3037 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3039 /* Several PCIe workarounds to ensure proper behaviour */
3040 if (ah->config.pcie_waen) {
3041 val = ah->config.pcie_waen;
3043 val &= (~AR_WA_D3_L1_DISABLE);
} else {
3045 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
AR_SREV_9287(ah)) {
3047 val = AR9285_WA_DEFAULT;
3049 val &= (~AR_WA_D3_L1_DISABLE);
3050 } else if (AR_SREV_9280(ah)) {
3052 * On AR9280 chips bit 22 of 0x4004 needs to be
3053 * set, otherwise the card may disappear.