2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 #include <asm/unaligned.h>
24 #define ATH9K_CLOCK_RATE_CCK 22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
30 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
31 struct ar5416_eeprom_def *pEepData,
33 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
34 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
36 /********************/
37 /* Helper Functions */
38 /********************/
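/*
 * Clock <-> microsecond helpers. The ATH9K_CLOCK_RATE_* constants above
 * are MAC clock rates in MHz (22 MHz for CCK, 44 MHz for 2 GHz OFDM,
 * 40 MHz for 5 GHz OFDM), so dividing a clock count by the rate yields
 * microseconds and multiplying microseconds by the rate yields clocks.
 * For example, on a 2 GHz OFDM channel a 20 us slot corresponds to
 * roughly 20 * 44 = 880 MAC clocks. In HT40 the MAC clock runs at twice
 * the base rate, so the conf_is_ht40() wrappers below halve the
 * microsecond result and double the clock result accordingly.
 */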
40 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
42 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
44 if (!ah->curchan) /* should really check for CCK instead */
45 return clks / ATH9K_CLOCK_RATE_CCK;
46 if (conf->channel->band == IEEE80211_BAND_2GHZ)
47 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
49 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
52 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
54 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
56 if (conf_is_ht40(conf))
57 return ath9k_hw_mac_usec(ah, clks) / 2;
59 return ath9k_hw_mac_usec(ah, clks);
62 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
64 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
66 if (!ah->curchan) /* should really check for CCK instead */
67 return usecs * ATH9K_CLOCK_RATE_CCK;
68 if (conf->channel->band == IEEE80211_BAND_2GHZ)
69 return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
70 return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
73 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
75 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
77 if (conf_is_ht40(conf))
78 return ath9k_hw_mac_clks(ah, usecs) * 2;
80 return ath9k_hw_mac_clks(ah, usecs);
83 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
87 BUG_ON(timeout < AH_TIME_QUANTUM);
89 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
90 if ((REG_READ(ah, reg) & mask) == val)
93 udelay(AH_TIME_QUANTUM);
96 ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
97 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
98 timeout, reg, REG_READ(ah, reg), mask, val);
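/*
 * Reverse the lowest n bits of val, e.g. ath9k_hw_reverse_bits(0xd, 4)
 * returns 0xb (1101b -> 1011b). Used below when decoding the analog
 * (radio) revision byte, which the hardware reports bit-reversed.
 */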
103 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
108 for (i = 0, retval = 0; i < n; i++) {
109 retval = (retval << 1) | (val & 1);
115 bool ath9k_get_channel_edges(struct ath_hw *ah,
119 struct ath9k_hw_capabilities *pCap = &ah->caps;
121 if (flags & CHANNEL_5GHZ) {
122 *low = pCap->low_5ghz_chan;
123 *high = pCap->high_5ghz_chan;
126 if ((flags & CHANNEL_2GHZ)) {
127 *low = pCap->low_2ghz_chan;
128 *high = pCap->high_2ghz_chan;
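/*
 * Frame duration calculation. For OFDM the duration is
 * SIFS + preamble + ceil((PLCP bits + 8 * frameLen) / bitsPerSymbol)
 * symbol times. A rough worked example, assuming the usual 802.11a/g
 * timing (16 us SIFS, 20 us preamble, 22 PLCP service/tail bits, 4 us
 * symbols): a 1500-byte frame at 24 Mbps gives 96 bits per symbol,
 * ceil((22 + 12000) / 96) = 126 symbols, so 16 + 20 + 126 * 4 = 540 us.
 * Half- and quarter-rate channels use the correspondingly scaled
 * *_HALF/*_QUARTER constants below.
 */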
134 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
135 const struct ath_rate_table *rates,
136 u32 frameLen, u16 rateix,
139 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
142 kbps = rates->info[rateix].ratekbps;
147 switch (rates->info[rateix].phy) {
148 case WLAN_RC_PHY_CCK:
149 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
150 if (shortPreamble && rates->info[rateix].short_preamble)
152 numBits = frameLen << 3;
153 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
155 case WLAN_RC_PHY_OFDM:
156 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
157 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
158 numBits = OFDM_PLCP_BITS + (frameLen << 3);
159 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
160 txTime = OFDM_SIFS_TIME_QUARTER
161 + OFDM_PREAMBLE_TIME_QUARTER
162 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
163 } else if (ah->curchan &&
164 IS_CHAN_HALF_RATE(ah->curchan)) {
165 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
166 numBits = OFDM_PLCP_BITS + (frameLen << 3);
167 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
168 txTime = OFDM_SIFS_TIME_HALF +
169 OFDM_PREAMBLE_TIME_HALF
170 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
172 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
173 numBits = OFDM_PLCP_BITS + (frameLen << 3);
174 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
175 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
176 + (numSymbols * OFDM_SYMBOL_TIME);
180 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
181 "Unknown phy %u (rate ix %u)\n",
182 rates->info[rateix].phy, rateix);
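/*
 * For HT40 the synthesizer is programmed 10 MHz (HT40_CHANNEL_CENTER_SHIFT)
 * above the control channel for HT40+ and 10 MHz below it for HT40-; the
 * control and extension channel centers then sit 10 MHz on either side of
 * the synth center. For HT20 all three centers are simply the channel
 * frequency.
 */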
190 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
191 struct ath9k_channel *chan,
192 struct chan_centers *centers)
196 if (!IS_CHAN_HT40(chan)) {
197 centers->ctl_center = centers->ext_center =
198 centers->synth_center = chan->channel;
202 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
203 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
204 centers->synth_center =
205 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
208 centers->synth_center =
209 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
213 centers->ctl_center =
214 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
215 /* 25 MHz spacing is supported by hw but not by the upper layers */
216 centers->ext_center =
217 centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
224 static void ath9k_hw_read_revisions(struct ath_hw *ah)
228 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
231 val = REG_READ(ah, AR_SREV);
232 ah->hw_version.macVersion =
233 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
234 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
235 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
237 if (!AR_SREV_9100(ah))
238 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
240 ah->hw_version.macRev = val & AR_SREV_REVISION;
242 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
243 ah->is_pciexpress = true;
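/*
 * Read the analog (radio) revision: after priming AR_PHY(0x36), strobe
 * AR_PHY(0x20) eight times to clock the value out, read the top byte of
 * AR_PHY(256), then undo the nibble swap and bit reversal the hardware
 * applies before returning it.
 */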
247 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
252 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
254 for (i = 0; i < 8; i++)
255 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
256 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
257 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
259 return ath9k_hw_reverse_bits(val, 8);
262 /************************************/
263 /* HW Attach, Detach, Init Routines */
264 /************************************/
266 static void ath9k_hw_disablepcie(struct ath_hw *ah)
268 if (AR_SREV_9100(ah))
271 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
272 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
273 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
274 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
275 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
276 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
277 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
278 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
279 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
281 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
284 static bool ath9k_hw_chip_test(struct ath_hw *ah)
286 struct ath_common *common = ath9k_hw_common(ah);
287 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
289 u32 patternData[4] = { 0x55555555,
295 for (i = 0; i < 2; i++) {
296 u32 addr = regAddr[i];
299 regHold[i] = REG_READ(ah, addr);
300 for (j = 0; j < 0x100; j++) {
301 wrData = (j << 16) | j;
302 REG_WRITE(ah, addr, wrData);
303 rdData = REG_READ(ah, addr);
304 if (rdData != wrData) {
305 ath_print(common, ATH_DBG_FATAL,
306 "address test failed "
307 "addr: 0x%08x - wr:0x%08x != "
309 addr, wrData, rdData);
313 for (j = 0; j < 4; j++) {
314 wrData = patternData[j];
315 REG_WRITE(ah, addr, wrData);
316 rdData = REG_READ(ah, addr);
317 if (wrData != rdData) {
318 ath_print(common, ATH_DBG_FATAL,
319 "address test failed "
320 "addr: 0x%08x - wr:0x%08x != "
322 addr, wrData, rdData);
326 REG_WRITE(ah, regAddr[i], regHold[i]);
333 static const char *ath9k_hw_devname(u16 devid)
336 case AR5416_DEVID_PCI:
337 return "Atheros 5416";
338 case AR5416_DEVID_PCIE:
339 return "Atheros 5418";
340 case AR9160_DEVID_PCI:
341 return "Atheros 9160";
342 case AR5416_AR9100_DEVID:
343 return "Atheros 9100";
344 case AR9280_DEVID_PCI:
345 case AR9280_DEVID_PCIE:
346 return "Atheros 9280";
347 case AR9285_DEVID_PCIE:
348 return "Atheros 9285";
349 case AR5416_DEVID_AR9287_PCI:
350 case AR5416_DEVID_AR9287_PCIE:
351 return "Atheros 9287";
357 static void ath9k_hw_init_config(struct ath_hw *ah)
361 ah->config.dma_beacon_response_time = 2;
362 ah->config.sw_beacon_response_time = 10;
363 ah->config.additional_swba_backoff = 0;
364 ah->config.ack_6mb = 0x0;
365 ah->config.cwm_ignore_extcca = 0;
366 ah->config.pcie_powersave_enable = 0;
367 ah->config.pcie_clock_req = 0;
368 ah->config.pcie_waen = 0;
369 ah->config.analog_shiftreg = 1;
370 ah->config.ht_enable = 1;
371 ah->config.ofdm_trig_low = 200;
372 ah->config.ofdm_trig_high = 500;
373 ah->config.cck_trig_high = 200;
374 ah->config.cck_trig_low = 100;
375 ah->config.enable_ani = 1;
376 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
377 ah->config.antenna_switch_swap = 0;
379 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
380 ah->config.spurchans[i][0] = AR_NO_SPUR;
381 ah->config.spurchans[i][1] = AR_NO_SPUR;
384 ah->config.intr_mitigation = true;
387 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
388 * _and_ only on multiprocessor systems (SMP/HT).
389 * This means we use it for all AR5416 devices, and the few
390 * AR9280 PCI (non-PCIe) devices out there.
392 * Serialization is required because these devices do not handle
393 * well the case of two concurrent reads/writes due to the latency
394 * involved. During one read/write another read/write can be issued
395 * on another CPU while the previous read/write may still be working
396 * on our hardware; if that happens the hardware can lock up in a loop.
397 * We prevent this by serializing reads and writes.
399 * This issue is not present on PCI-Express devices or pre-AR5416
400 * devices (legacy, 802.11abg).
402 if (num_possible_cpus() > 1)
403 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
406 static void ath9k_hw_init_defaults(struct ath_hw *ah)
408 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
410 regulatory->country_code = CTRY_DEFAULT;
411 regulatory->power_limit = MAX_RATE_POWER;
412 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
414 ah->hw_version.magic = AR5416_MAGIC;
415 ah->hw_version.subvendorid = 0;
418 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
419 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
420 if (!AR_SREV_9100(ah))
421 ah->ah_flags = AH_USE_EEPROM;
424 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
425 ah->beacon_interval = 100;
426 ah->enable_32kHz_clock = DONT_USE_32KHZ;
427 ah->slottime = (u32) -1;
428 ah->acktimeout = (u32) -1;
429 ah->ctstimeout = (u32) -1;
430 ah->globaltxtimeout = (u32) -1;
432 ah->gbeacon_rate = 0;
434 ah->power_mode = ATH9K_PM_UNDEFINED;
437 static int ath9k_hw_rfattach(struct ath_hw *ah)
439 bool rfStatus = false;
442 rfStatus = ath9k_hw_init_rf(ah, &ecode);
444 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
445 "RF setup failed, status: %u\n", ecode);
452 static int ath9k_hw_rf_claim(struct ath_hw *ah)
456 REG_WRITE(ah, AR_PHY(0), 0x00000007);
458 val = ath9k_hw_get_radiorev(ah);
459 switch (val & AR_RADIO_SREV_MAJOR) {
461 val = AR_RAD5133_SREV_MAJOR;
463 case AR_RAD5133_SREV_MAJOR:
464 case AR_RAD5122_SREV_MAJOR:
465 case AR_RAD2133_SREV_MAJOR:
466 case AR_RAD2122_SREV_MAJOR:
469 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
470 "Radio Chip Rev 0x%02X not supported\n",
471 val & AR_RADIO_SREV_MAJOR);
475 ah->hw_version.analog5GhzRev = val;
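/*
 * The MAC address is stored in the EEPROM as three 16-bit words
 * (AR_EEPROM_MAC(0..2)), high byte first. An address of all zeros or all
 * ones is treated as unprogrammed and rejected with -EADDRNOTAVAIL.
 */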
480 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
482 struct ath_common *common = ath9k_hw_common(ah);
488 for (i = 0; i < 3; i++) {
489 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
491 common->macaddr[2 * i] = eeval >> 8;
492 common->macaddr[2 * i + 1] = eeval & 0xff;
494 if (sum == 0 || sum == 0xffff * 3)
495 return -EADDRNOTAVAIL;
500 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
504 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
505 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
507 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
508 INIT_INI_ARRAY(&ah->iniModesRxGain,
509 ar9280Modes_backoff_13db_rxgain_9280_2,
510 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
511 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
512 INIT_INI_ARRAY(&ah->iniModesRxGain,
513 ar9280Modes_backoff_23db_rxgain_9280_2,
514 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
516 INIT_INI_ARRAY(&ah->iniModesRxGain,
517 ar9280Modes_original_rxgain_9280_2,
518 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
520 INIT_INI_ARRAY(&ah->iniModesRxGain,
521 ar9280Modes_original_rxgain_9280_2,
522 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
526 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
530 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
531 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
533 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
534 INIT_INI_ARRAY(&ah->iniModesTxGain,
535 ar9280Modes_high_power_tx_gain_9280_2,
536 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
538 INIT_INI_ARRAY(&ah->iniModesTxGain,
539 ar9280Modes_original_tx_gain_9280_2,
540 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
542 INIT_INI_ARRAY(&ah->iniModesTxGain,
543 ar9280Modes_original_tx_gain_9280_2,
544 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
548 static int ath9k_hw_post_init(struct ath_hw *ah)
552 if (!ath9k_hw_chip_test(ah))
555 ecode = ath9k_hw_rf_claim(ah);
559 ecode = ath9k_hw_eeprom_init(ah);
563 ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG,
564 "Eeprom VER: %d, REV: %d\n",
565 ah->eep_ops->get_eeprom_ver(ah),
566 ah->eep_ops->get_eeprom_rev(ah));
568 ecode = ath9k_hw_rfattach(ah);
572 if (!AR_SREV_9100(ah)) {
573 ath9k_hw_ani_setup(ah);
574 ath9k_hw_ani_init(ah);
580 static bool ath9k_hw_devid_supported(u16 devid)
583 case AR5416_DEVID_PCI:
584 case AR5416_DEVID_PCIE:
585 case AR5416_AR9100_DEVID:
586 case AR9160_DEVID_PCI:
587 case AR9280_DEVID_PCI:
588 case AR9280_DEVID_PCIE:
589 case AR9285_DEVID_PCIE:
590 case AR5416_DEVID_AR9287_PCI:
591 case AR5416_DEVID_AR9287_PCIE:
599 static bool ath9k_hw_macversion_supported(u32 macversion)
601 switch (macversion) {
602 case AR_SREV_VERSION_5416_PCI:
603 case AR_SREV_VERSION_5416_PCIE:
604 case AR_SREV_VERSION_9160:
605 case AR_SREV_VERSION_9100:
606 case AR_SREV_VERSION_9280:
607 case AR_SREV_VERSION_9285:
608 case AR_SREV_VERSION_9287:
611 case AR_SREV_VERSION_9271:
618 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
620 if (AR_SREV_9160_10_OR_LATER(ah)) {
621 if (AR_SREV_9280_10_OR_LATER(ah)) {
622 ah->iq_caldata.calData = &iq_cal_single_sample;
623 ah->adcgain_caldata.calData =
624 &adc_gain_cal_single_sample;
625 ah->adcdc_caldata.calData =
626 &adc_dc_cal_single_sample;
627 ah->adcdc_calinitdata.calData =
630 ah->iq_caldata.calData = &iq_cal_multi_sample;
631 ah->adcgain_caldata.calData =
632 &adc_gain_cal_multi_sample;
633 ah->adcdc_caldata.calData =
634 &adc_dc_cal_multi_sample;
635 ah->adcdc_calinitdata.calData =
638 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
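/*
 * Register the initvals tables for this chip revision. Each
 * INIT_INI_ARRAY() call records a table pointer together with its row and
 * column counts: the mode-dependent tables use 6 columns (a register
 * address plus per-mode value columns), the common register/value tables
 * use 2, and a few band-dependent tables use 3.
 */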
642 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
644 if (AR_SREV_9271(ah)) {
645 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
646 ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
647 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
648 ARRAY_SIZE(ar9271Common_9271_1_0), 2);
652 if (AR_SREV_9287_11_OR_LATER(ah)) {
653 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
654 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
655 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
656 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
657 if (ah->config.pcie_clock_req)
658 INIT_INI_ARRAY(&ah->iniPcieSerdes,
659 ar9287PciePhy_clkreq_off_L1_9287_1_1,
660 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
662 INIT_INI_ARRAY(&ah->iniPcieSerdes,
663 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
664 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
666 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
667 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
668 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
669 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
670 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
672 if (ah->config.pcie_clock_req)
673 INIT_INI_ARRAY(&ah->iniPcieSerdes,
674 ar9287PciePhy_clkreq_off_L1_9287_1_0,
675 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
677 INIT_INI_ARRAY(&ah->iniPcieSerdes,
678 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
679 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
681 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
684 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
685 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
686 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
687 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
689 if (ah->config.pcie_clock_req) {
690 INIT_INI_ARRAY(&ah->iniPcieSerdes,
691 ar9285PciePhy_clkreq_off_L1_9285_1_2,
692 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
694 INIT_INI_ARRAY(&ah->iniPcieSerdes,
695 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
696 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
699 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
700 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
701 ARRAY_SIZE(ar9285Modes_9285), 6);
702 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
703 ARRAY_SIZE(ar9285Common_9285), 2);
705 if (ah->config.pcie_clock_req) {
706 INIT_INI_ARRAY(&ah->iniPcieSerdes,
707 ar9285PciePhy_clkreq_off_L1_9285,
708 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
710 INIT_INI_ARRAY(&ah->iniPcieSerdes,
711 ar9285PciePhy_clkreq_always_on_L1_9285,
712 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
714 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
715 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
716 ARRAY_SIZE(ar9280Modes_9280_2), 6);
717 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
718 ARRAY_SIZE(ar9280Common_9280_2), 2);
720 if (ah->config.pcie_clock_req) {
721 INIT_INI_ARRAY(&ah->iniPcieSerdes,
722 ar9280PciePhy_clkreq_off_L1_9280,
723 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280), 2);
725 INIT_INI_ARRAY(&ah->iniPcieSerdes,
726 ar9280PciePhy_clkreq_always_on_L1_9280,
727 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
729 INIT_INI_ARRAY(&ah->iniModesAdditional,
730 ar9280Modes_fast_clock_9280_2,
731 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
732 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
733 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
734 ARRAY_SIZE(ar9280Modes_9280), 6);
735 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
736 ARRAY_SIZE(ar9280Common_9280), 2);
737 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
738 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
739 ARRAY_SIZE(ar5416Modes_9160), 6);
740 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
741 ARRAY_SIZE(ar5416Common_9160), 2);
742 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
743 ARRAY_SIZE(ar5416Bank0_9160), 2);
744 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
745 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
746 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
747 ARRAY_SIZE(ar5416Bank1_9160), 2);
748 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
749 ARRAY_SIZE(ar5416Bank2_9160), 2);
750 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
751 ARRAY_SIZE(ar5416Bank3_9160), 3);
752 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
753 ARRAY_SIZE(ar5416Bank6_9160), 3);
754 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
755 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
756 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
757 ARRAY_SIZE(ar5416Bank7_9160), 2);
758 if (AR_SREV_9160_11(ah)) {
759 INIT_INI_ARRAY(&ah->iniAddac,
761 ARRAY_SIZE(ar5416Addac_91601_1), 2);
763 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
764 ARRAY_SIZE(ar5416Addac_9160), 2);
766 } else if (AR_SREV_9100_OR_LATER(ah)) {
767 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
768 ARRAY_SIZE(ar5416Modes_9100), 6);
769 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
770 ARRAY_SIZE(ar5416Common_9100), 2);
771 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
772 ARRAY_SIZE(ar5416Bank0_9100), 2);
773 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
774 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
775 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
776 ARRAY_SIZE(ar5416Bank1_9100), 2);
777 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
778 ARRAY_SIZE(ar5416Bank2_9100), 2);
779 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
780 ARRAY_SIZE(ar5416Bank3_9100), 3);
781 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
782 ARRAY_SIZE(ar5416Bank6_9100), 3);
783 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
784 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
785 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
786 ARRAY_SIZE(ar5416Bank7_9100), 2);
787 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
788 ARRAY_SIZE(ar5416Addac_9100), 2);
790 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
791 ARRAY_SIZE(ar5416Modes), 6);
792 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
793 ARRAY_SIZE(ar5416Common), 2);
794 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
795 ARRAY_SIZE(ar5416Bank0), 2);
796 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
797 ARRAY_SIZE(ar5416BB_RfGain), 3);
798 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
799 ARRAY_SIZE(ar5416Bank1), 2);
800 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
801 ARRAY_SIZE(ar5416Bank2), 2);
802 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
803 ARRAY_SIZE(ar5416Bank3), 3);
804 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
805 ARRAY_SIZE(ar5416Bank6), 3);
806 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
807 ARRAY_SIZE(ar5416Bank6TPC), 3);
808 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
809 ARRAY_SIZE(ar5416Bank7), 2);
810 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
811 ARRAY_SIZE(ar5416Addac), 2);
815 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
817 if (AR_SREV_9287_11_OR_LATER(ah))
818 INIT_INI_ARRAY(&ah->iniModesRxGain,
819 ar9287Modes_rx_gain_9287_1_1,
820 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
821 else if (AR_SREV_9287_10(ah))
822 INIT_INI_ARRAY(&ah->iniModesRxGain,
823 ar9287Modes_rx_gain_9287_1_0,
824 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
825 else if (AR_SREV_9280_20(ah))
826 ath9k_hw_init_rxgain_ini(ah);
828 if (AR_SREV_9287_11_OR_LATER(ah)) {
829 INIT_INI_ARRAY(&ah->iniModesTxGain,
830 ar9287Modes_tx_gain_9287_1_1,
831 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
832 } else if (AR_SREV_9287_10(ah)) {
833 INIT_INI_ARRAY(&ah->iniModesTxGain,
834 ar9287Modes_tx_gain_9287_1_0,
835 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
836 } else if (AR_SREV_9280_20(ah)) {
837 ath9k_hw_init_txgain_ini(ah);
838 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
839 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
842 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
843 INIT_INI_ARRAY(&ah->iniModesTxGain,
844 ar9285Modes_high_power_tx_gain_9285_1_2,
845 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
847 INIT_INI_ARRAY(&ah->iniModesTxGain,
848 ar9285Modes_original_tx_gain_9285_1_2,
849 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
855 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
859 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
860 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
863 for (i = 0; i < ah->iniModes.ia_rows; i++) {
864 u32 reg = INI_RA(&ah->iniModes, i, 0);
866 for (j = 1; j < ah->iniModes.ia_columns; j++) {
867 u32 val = INI_RA(&ah->iniModes, i, j);
869 INI_RA(&ah->iniModes, i, j) =
870 ath9k_hw_ini_fixup(ah,
878 int ath9k_hw_init(struct ath_hw *ah)
880 struct ath_common *common = ath9k_hw_common(ah);
883 if (!ath9k_hw_devid_supported(ah->hw_version.devid))
886 ath9k_hw_init_defaults(ah);
887 ath9k_hw_init_config(ah);
889 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
890 ath_print(common, ATH_DBG_FATAL,
891 "Couldn't reset chip\n");
895 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
896 ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
900 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
901 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
902 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
903 ah->config.serialize_regmode =
906 ah->config.serialize_regmode =
911 ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
912 ah->config.serialize_regmode);
914 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
915 ath_print(common, ATH_DBG_FATAL,
916 "Mac Chip Rev 0x%02x.%x is not supported by "
917 "this driver\n", ah->hw_version.macVersion,
918 ah->hw_version.macRev);
922 if (AR_SREV_9100(ah)) {
923 ah->iq_caldata.calData = &iq_cal_multi_sample;
924 ah->supp_cals = IQ_MISMATCH_CAL;
925 ah->is_pciexpress = false;
928 if (AR_SREV_9271(ah))
929 ah->is_pciexpress = false;
931 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
933 ath9k_hw_init_cal_settings(ah);
935 ah->ani_function = ATH9K_ANI_ALL;
936 if (AR_SREV_9280_10_OR_LATER(ah))
937 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
939 ath9k_hw_init_mode_regs(ah);
941 if (ah->is_pciexpress)
942 ath9k_hw_configpcipowersave(ah, 0, 0);
944 ath9k_hw_disablepcie(ah);
946 r = ath9k_hw_post_init(ah);
950 ath9k_hw_init_mode_gain_regs(ah);
951 ath9k_hw_fill_cap_info(ah);
952 ath9k_hw_init_11a_eeprom_fix(ah);
954 r = ath9k_hw_init_macaddr(ah);
956 ath_print(common, ATH_DBG_FATAL,
957 "Failed to initialize MAC address\n");
961 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
962 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
964 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
966 ath9k_init_nfcal_hist_buffer(ah);
971 static void ath9k_hw_init_bb(struct ath_hw *ah,
972 struct ath9k_channel *chan)
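/*
 * AR_PHY_RX_DELAY reportedly counts the synth settling delay in 100 ns
 * steps; the scaling below converts it to microseconds before the
 * udelay() that follows enabling the baseband.
 */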
976 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
978 synthDelay = (4 * synthDelay) / 22;
982 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
984 udelay(synthDelay + BASE_ACTIVATE_DELAY);
987 static void ath9k_hw_init_qos(struct ath_hw *ah)
989 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
990 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
992 REG_WRITE(ah, AR_QOS_NO_ACK,
993 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
994 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
995 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
997 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
998 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
999 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1000 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1001 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
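/*
 * Program the RTC PLL for the current channel. The REFDIV, CLKSEL and DIV
 * fields together pick the baseband clock for the band in use; half- and
 * quarter-rate channels select a slower clock via CLKSEL, and 5 GHz
 * channels use a different divider than 2.4 GHz ones. After writing the
 * PLL the code waits RTC_PLL_SETTLE_DELAY and then forces the derived
 * sleep clock.
 */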
1004 static void ath9k_hw_init_pll(struct ath_hw *ah,
1005 struct ath9k_channel *chan)
1009 if (AR_SREV_9100(ah)) {
1010 if (chan && IS_CHAN_5GHZ(chan))
1015 if (AR_SREV_9280_10_OR_LATER(ah)) {
1016 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1018 if (chan && IS_CHAN_HALF_RATE(chan))
1019 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1020 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1021 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1023 if (chan && IS_CHAN_5GHZ(chan)) {
1024 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1027 if (AR_SREV_9280_20(ah)) {
1028 if (((chan->channel % 20) == 0)
1029 || ((chan->channel % 10) == 0))
1035 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1038 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1040 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1042 if (chan && IS_CHAN_HALF_RATE(chan))
1043 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1044 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1045 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1047 if (chan && IS_CHAN_5GHZ(chan))
1048 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1050 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1052 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1054 if (chan && IS_CHAN_HALF_RATE(chan))
1055 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1056 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1057 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1059 if (chan && IS_CHAN_5GHZ(chan))
1060 pll |= SM(0xa, AR_RTC_PLL_DIV);
1062 pll |= SM(0xb, AR_RTC_PLL_DIV);
1065 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1067 udelay(RTC_PLL_SETTLE_DELAY);
1069 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
1072 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1074 int rx_chainmask, tx_chainmask;
1076 rx_chainmask = ah->rxchainmask;
1077 tx_chainmask = ah->txchainmask;
1079 switch (rx_chainmask) {
1081 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1082 AR_PHY_SWAP_ALT_CHAIN);
1084 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1085 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1086 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1092 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1093 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1099 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1100 if (tx_chainmask == 0x5) {
1101 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1102 AR_PHY_SWAP_ALT_CHAIN);
1104 if (AR_SREV_9100(ah))
1105 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1106 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1109 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1110 enum nl80211_iftype opmode)
1112 ah->mask_reg = AR_IMR_TXERR |
1118 if (ah->config.intr_mitigation)
1119 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1121 ah->mask_reg |= AR_IMR_RXOK;
1123 ah->mask_reg |= AR_IMR_TXOK;
1125 if (opmode == NL80211_IFTYPE_AP)
1126 ah->mask_reg |= AR_IMR_MIB;
1128 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1129 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1131 if (!AR_SREV_9100(ah)) {
1132 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1133 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1134 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
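/*
 * The ACK/CTS timeout registers are programmed in MAC clocks, so the
 * helpers below convert the requested microsecond value with
 * ath9k_hw_mac_to_clks() and reject anything that would overflow the
 * AR_TIME_OUT field (the MS(0xffffffff, ...) expression is the largest
 * programmable value, converted back to microseconds for the comparison).
 */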
1138 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1140 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1141 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1142 "bad ack timeout %u\n", us);
1143 ah->acktimeout = (u32) -1;
1146 REG_RMW_FIELD(ah, AR_TIME_OUT,
1147 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1148 ah->acktimeout = us;
1153 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1155 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1156 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1157 "bad cts timeout %u\n", us);
1158 ah->ctstimeout = (u32) -1;
1161 REG_RMW_FIELD(ah, AR_TIME_OUT,
1162 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1163 ah->ctstimeout = us;
1168 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1171 ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT,
1172 "bad global tx timeout %u\n", tu);
1173 ah->globaltxtimeout = (u32) -1;
1176 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1177 ah->globaltxtimeout = tu;
1182 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1184 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1187 if (ah->misc_mode != 0)
1188 REG_WRITE(ah, AR_PCU_MISC,
1189 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1190 if (ah->slottime != (u32) -1)
1191 ath9k_hw_setslottime(ah, ah->slottime);
1192 if (ah->acktimeout != (u32) -1)
1193 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1194 if (ah->ctstimeout != (u32) -1)
1195 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1196 if (ah->globaltxtimeout != (u32) -1)
1197 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1200 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1202 return vendorid == ATHEROS_VENDOR_ID ?
1203 ath9k_hw_devname(devid) : NULL;
1206 void ath9k_hw_detach(struct ath_hw *ah)
1208 if (!AR_SREV_9100(ah))
1209 ath9k_hw_ani_disable(ah);
1211 ath9k_hw_rf_free(ah);
1212 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1221 static void ath9k_hw_override_ini(struct ath_hw *ah,
1222 struct ath9k_channel *chan)
1226 if (AR_SREV_9271(ah)) {
1228 * Enable spectral scan as a workaround for issues with stuck
1229 * beacons on AR9271 1.0. The stuck-beacon issue is not seen on
1232 if (AR_SREV_9271_10(ah)) {
1233 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1234 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1236 else if (AR_SREV_9271_11(ah))
1238 * change the AR_PHY_RF_CTL3 setting to fix a MAC issue
1239 * present on AR9271 1.1
1241 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1246 * Set RX_ABORT and RX_DIS, and clear them only after
1247 * RXE is set for the MAC. This prevents frames with a corrupted
1248 * descriptor status.
1250 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1252 if (AR_SREV_9280_10_OR_LATER(ah)) {
1253 val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1254 (~AR_PCU_MISC_MODE2_HWWAR1);
1256 if (AR_SREV_9287_10_OR_LATER(ah))
1257 val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1259 REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1262 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1263 AR_SREV_9280_10_OR_LATER(ah))
1266 * Disable BB clock gating
1267 * Necessary to avoid issues on AR5416 2.0
1269 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
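/*
 * EEPROM-driven fixup of a single initval: on AR9280 PCI cards the value
 * headed for register 0x7894 (AR_AN_TOP2) gets its PWDCLKIND field
 * replaced with the pwdclkind value from the EEPROM base header, but only
 * for EEPROM versions newer than 0x0a.
 */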
1272 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1273 struct ar5416_eeprom_def *pEepData,
1276 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1277 struct ath_common *common = ath9k_hw_common(ah);
1279 switch (ah->hw_version.devid) {
1280 case AR9280_DEVID_PCI:
1281 if (reg == 0x7894) {
1282 ath_print(common, ATH_DBG_EEPROM,
1283 "ini VAL: %x EEPROM: %x\n", value,
1284 (pBase->version & 0xff));
1286 if ((pBase->version & 0xff) > 0x0a) {
1287 ath_print(common, ATH_DBG_EEPROM,
1290 value &= ~AR_AN_TOP2_PWDCLKIND;
1291 value |= AR_AN_TOP2_PWDCLKIND &
1292 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1294 ath_print(common, ATH_DBG_EEPROM,
1295 "PWDCLKIND Earlier Rev\n");
1298 ath_print(common, ATH_DBG_EEPROM,
1299 "final ini VAL: %x\n", value);
1307 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1308 struct ar5416_eeprom_def *pEepData,
1311 if (ah->eep_map == EEP_MAP_4KBITS)
1314 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
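/*
 * Open-loop power control (OLC) setup: AR9287 and later switch the TX
 * power control analog block to temperature-sensor mode, while on AR9280
 * the original TX gain table entries are cached in ah->originalGain[] for
 * use by the later open-loop power adjustments.
 */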
1317 static void ath9k_olc_init(struct ath_hw *ah)
1321 if (OLC_FOR_AR9287_10_LATER) {
1322 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1323 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1324 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1325 AR9287_AN_TXPC0_TXPCMODE,
1326 AR9287_AN_TXPC0_TXPCMODE_S,
1327 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1330 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1331 ah->originalGain[i] =
1332 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1338 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1339 struct ath9k_channel *chan)
1341 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1343 if (IS_CHAN_B(chan))
1345 else if (IS_CHAN_G(chan))
1353 static int ath9k_hw_process_ini(struct ath_hw *ah,
1354 struct ath9k_channel *chan)
1356 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1357 int i, regWrites = 0;
1358 struct ieee80211_channel *channel = chan->chan;
1359 u32 modesIndex, freqIndex;
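/*
 * modesIndex picks which value column of the per-mode initval tables is
 * written for this channel type; freqIndex is the band index later handed
 * to ath9k_hw_write_regs().
 */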
1361 switch (chan->chanmode) {
1363 case CHANNEL_A_HT20:
1367 case CHANNEL_A_HT40PLUS:
1368 case CHANNEL_A_HT40MINUS:
1373 case CHANNEL_G_HT20:
1378 case CHANNEL_G_HT40PLUS:
1379 case CHANNEL_G_HT40MINUS:
1388 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1389 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1390 ah->eep_ops->set_addac(ah, chan);
1392 if (AR_SREV_5416_22_OR_LATER(ah)) {
1393 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1395 struct ar5416IniArray temp;
1397 sizeof(u32) * ah->iniAddac.ia_rows *
1398 ah->iniAddac.ia_columns;
1400 memcpy(ah->addac5416_21,
1401 ah->iniAddac.ia_array, addacSize);
1403 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1405 temp.ia_array = ah->addac5416_21;
1406 temp.ia_columns = ah->iniAddac.ia_columns;
1407 temp.ia_rows = ah->iniAddac.ia_rows;
1408 REG_WRITE_ARRAY(&temp, 1, regWrites);
1411 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1413 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1414 u32 reg = INI_RA(&ah->iniModes, i, 0);
1415 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1417 REG_WRITE(ah, reg, val);
1419 if (reg >= 0x7800 && reg < 0x78a0
1420 && ah->config.analog_shiftreg) {
1424 DO_DELAY(regWrites);
1427 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1428 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1430 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1431 AR_SREV_9287_10_OR_LATER(ah))
1432 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1434 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1435 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1436 u32 val = INI_RA(&ah->iniCommon, i, 1);
1438 REG_WRITE(ah, reg, val);
1440 if (reg >= 0x7800 && reg < 0x78a0
1441 && ah->config.analog_shiftreg) {
1445 DO_DELAY(regWrites);
1448 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1450 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1451 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1455 ath9k_hw_override_ini(ah, chan);
1456 ath9k_hw_set_regs(ah, chan);
1457 ath9k_hw_init_chain_masks(ah);
1459 if (OLC_FOR_AR9280_20_LATER)
1462 ah->eep_ops->set_txpower(ah, chan,
1463 ath9k_regd_get_ctl(regulatory, chan),
1464 channel->max_antenna_gain * 2,
1465 channel->max_power * 2,
1466 min((u32) MAX_RATE_POWER,
1467 (u32) regulatory->power_limit));
1469 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1470 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
1471 "ar5416SetRfRegs failed\n");
1478 /****************************************/
1479 /* Reset and Channel Switching Routines */
1480 /****************************************/
1482 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1489 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1490 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1492 if (!AR_SREV_9280_10_OR_LATER(ah))
1493 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1494 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1496 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1497 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1499 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1502 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1504 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1507 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1512 * set AHB_MODE not to do cacheline prefetches
1514 regval = REG_READ(ah, AR_AHB_MODE);
1515 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1518 * let mac dma reads be in 128 byte chunks
1520 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1521 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1524 * Restore TX Trigger Level to its pre-reset value.
1525 * The initial value depends on whether aggregation is enabled, and is
1526 * adjusted whenever underruns are detected.
1528 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1531 * let mac dma writes be in 128 byte chunks
1533 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1534 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1537 * Set up the receive FIFO threshold to hold off TX activities
1539 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1542 * reduce the number of usable entries in PCU TXBUF to avoid
1543 * wrap around issues.
1545 if (AR_SREV_9285(ah)) {
1546 /* On AR9285 the number of FIFOs is reduced by half,
1547 * so also halve the usable tx buf size to
1548 * avoid data/delimiter underruns
1550 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1551 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1552 } else if (!AR_SREV_9271(ah)) {
1553 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1554 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1558 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1562 val = REG_READ(ah, AR_STA_ID1);
1563 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1565 case NL80211_IFTYPE_AP:
1566 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1567 | AR_STA_ID1_KSRCH_MODE);
1568 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1570 case NL80211_IFTYPE_ADHOC:
1571 case NL80211_IFTYPE_MESH_POINT:
1572 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1573 | AR_STA_ID1_KSRCH_MODE);
1574 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1576 case NL80211_IFTYPE_STATION:
1577 case NL80211_IFTYPE_MONITOR:
1578 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
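/*
 * Delta-slope coefficient helper: coef_scaled is a fixed-point fraction
 * with COEF_SCALE_S fractional bits (the 0x64000000 constant used below
 * is 100 * 2^24, i.e. 100 MHz in that format assuming a 24-bit shift).
 * The loop locates the most significant bit to pick an exponent, rounds
 * the mantissa to the remaining precision, and returns the pair in the
 * form the AR_PHY_TIMING3/AR_PHY_HALFGI fields expect.
 */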
1583 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1588 u32 coef_exp, coef_man;
1590 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1591 if ((coef_scaled >> coef_exp) & 0x1)
1594 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1596 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1598 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1599 *coef_exponent = coef_exp - 16;
1602 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1603 struct ath9k_channel *chan)
1605 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1606 u32 clockMhzScaled = 0x64000000;
1607 struct chan_centers centers;
1609 if (IS_CHAN_HALF_RATE(chan))
1610 clockMhzScaled = clockMhzScaled >> 1;
1611 else if (IS_CHAN_QUARTER_RATE(chan))
1612 clockMhzScaled = clockMhzScaled >> 2;
1614 ath9k_hw_get_channel_centers(ah, chan, &centers);
1615 coef_scaled = clockMhzScaled / centers.synth_center;
1617 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1620 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1621 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1622 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1623 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1625 coef_scaled = (9 * coef_scaled) / 10;
1627 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1630 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1631 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1632 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1633 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
1636 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1641 if (AR_SREV_9100(ah)) {
1642 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1643 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1644 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1645 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1646 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1649 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1650 AR_RTC_FORCE_WAKE_ON_INT);
1652 if (AR_SREV_9100(ah)) {
1653 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1654 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1656 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1658 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1659 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1660 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1661 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1663 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1666 rst_flags = AR_RTC_RC_MAC_WARM;
1667 if (type == ATH9K_RESET_COLD)
1668 rst_flags |= AR_RTC_RC_MAC_COLD;
1671 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1674 REG_WRITE(ah, AR_RTC_RC, 0);
1675 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1676 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1677 "RTC stuck in MAC reset\n");
1681 if (!AR_SREV_9100(ah))
1682 REG_WRITE(ah, AR_RC, 0);
1684 ath9k_hw_init_pll(ah, NULL);
1686 if (AR_SREV_9100(ah))
1692 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1694 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1695 AR_RTC_FORCE_WAKE_ON_INT);
1697 if (!AR_SREV_9100(ah))
1698 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1700 REG_WRITE(ah, AR_RTC_RESET, 0);
1703 if (!AR_SREV_9100(ah))
1704 REG_WRITE(ah, AR_RC, 0);
1706 REG_WRITE(ah, AR_RTC_RESET, 1);
1708 if (!ath9k_hw_wait(ah,
1713 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1714 "RTC not waking up\n");
1718 ath9k_hw_read_revisions(ah);
1720 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1723 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1725 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1726 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1729 case ATH9K_RESET_POWER_ON:
1730 return ath9k_hw_set_reset_power_on(ah);
1731 case ATH9K_RESET_WARM:
1732 case ATH9K_RESET_COLD:
1733 return ath9k_hw_set_reset(ah, type);
1739 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan)
1742 u32 enableDacFifo = 0;
1744 if (AR_SREV_9285_10_OR_LATER(ah))
1745 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1746 AR_PHY_FC_ENABLE_DAC_FIFO);
1748 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1749 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1751 if (IS_CHAN_HT40(chan)) {
1752 phymode |= AR_PHY_FC_DYN2040_EN;
1754 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1755 (chan->chanmode == CHANNEL_G_HT40PLUS))
1756 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1759 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1761 ath9k_hw_set11nmac2040(ah);
1763 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1764 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1767 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1768 struct ath9k_channel *chan)
1770 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1771 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1773 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1776 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1779 ah->chip_fullsleep = false;
1780 ath9k_hw_init_pll(ah, chan);
1781 ath9k_hw_set_rfmode(ah, chan);
1786 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1787 struct ath9k_channel *chan)
1789 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1790 struct ath_common *common = ath9k_hw_common(ah);
1791 struct ieee80211_channel *channel = chan->chan;
1792 u32 synthDelay, qnum;
1794 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1795 if (ath9k_hw_numtxpending(ah, qnum)) {
1796 ath_print(common, ATH_DBG_QUEUE,
1797 "Transmit frames pending on "
1798 "queue %d\n", qnum);
1803 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1804 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1805 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1806 ath_print(common, ATH_DBG_FATAL,
1807 "Could not kill baseband RX\n");
1811 ath9k_hw_set_regs(ah, chan);
1813 if (AR_SREV_9280_10_OR_LATER(ah)) {
1814 ath9k_hw_ar9280_set_channel(ah, chan);
1816 if (!(ath9k_hw_set_channel(ah, chan))) {
1817 ath_print(common, ATH_DBG_FATAL,
1818 "Failed to set channel\n");
1823 ah->eep_ops->set_txpower(ah, chan,
1824 ath9k_regd_get_ctl(regulatory, chan),
1825 channel->max_antenna_gain * 2,
1826 channel->max_power * 2,
1827 min((u32) MAX_RATE_POWER,
1828 (u32) regulatory->power_limit));
1830 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1831 if (IS_CHAN_B(chan))
1832 synthDelay = (4 * synthDelay) / 22;
1836 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1838 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1840 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1841 ath9k_hw_set_delta_slope(ah, chan);
1843 if (AR_SREV_9280_10_OR_LATER(ah))
1844 ath9k_hw_9280_spur_mitigate(ah, chan);
1846 ath9k_hw_spur_mitigate(ah, chan);
1848 if (!chan->oneTimeCalsDone)
1849 chan->oneTimeCalsDone = true;
1854 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1856 int bb_spur = AR_NO_SPUR;
1859 int bb_spur_off, spur_subchannel_sd;
1861 int spur_delta_phase;
1863 int upper, lower, cur_vit_mask;
1866 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1867 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1869 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1870 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1872 int inc[4] = { 0, 100, 0, 0 };
1873 struct chan_centers centers;
1880 bool is2GHz = IS_CHAN_2GHZ(chan);
1882 memset(&mask_m, 0, sizeof(int8_t) * 123);
1883 memset(&mask_p, 0, sizeof(int8_t) * 123);
1885 ath9k_hw_get_channel_centers(ah, chan, &centers);
1886 freq = centers.synth_center;
1888 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1889 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1890 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1893 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1895 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1897 if (AR_NO_SPUR == cur_bb_spur)
1899 cur_bb_spur = cur_bb_spur - freq;
1901 if (IS_CHAN_HT40(chan)) {
1902 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1903 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1904 bb_spur = cur_bb_spur;
1907 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1908 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1909 bb_spur = cur_bb_spur;
1914 if (AR_NO_SPUR == bb_spur) {
1915 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1916 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1919 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1920 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
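/*
 * bb_spur is the spur offset from the synth center in MHz; with 0.3125 MHz
 * OFDM bin spacing that is 3.2 bins per MHz, and the masks below work in
 * hundredths of a bin, hence the factor of 320 here (and the 6100/100
 * stepping of cur_vit_mask further down).
 */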
1923 bin = bb_spur * 320;
1925 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1927 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1928 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1929 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1930 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1931 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1933 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1934 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1935 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1936 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1937 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1938 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1940 if (IS_CHAN_HT40(chan)) {
1942 spur_subchannel_sd = 1;
1943 bb_spur_off = bb_spur + 10;
1945 spur_subchannel_sd = 0;
1946 bb_spur_off = bb_spur - 10;
1949 spur_subchannel_sd = 0;
1950 bb_spur_off = bb_spur;
1953 if (IS_CHAN_HT40(chan))
1955 ((bb_spur * 262144) /
1956 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1959 ((bb_spur * 524288) /
1960 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1962 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1963 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1965 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1966 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1967 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1968 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1970 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1971 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
1977 for (i = 0; i < 4; i++) {
1981 for (bp = 0; bp < 30; bp++) {
1982 if ((cur_bin > lower) && (cur_bin < upper)) {
1983 pilot_mask = pilot_mask | 0x1 << bp;
1984 chan_mask = chan_mask | 0x1 << bp;
1989 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
1990 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
1993 cur_vit_mask = 6100;
1997 for (i = 0; i < 123; i++) {
1998 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2000 /* workaround for gcc bug #37014 */
2001 volatile int tmp_v = abs(cur_vit_mask - bin);
2007 if (cur_vit_mask < 0)
2008 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2010 mask_p[cur_vit_mask / 100] = mask_amt;
2012 cur_vit_mask -= 100;
2015 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2016 | (mask_m[48] << 26) | (mask_m[49] << 24)
2017 | (mask_m[50] << 22) | (mask_m[51] << 20)
2018 | (mask_m[52] << 18) | (mask_m[53] << 16)
2019 | (mask_m[54] << 14) | (mask_m[55] << 12)
2020 | (mask_m[56] << 10) | (mask_m[57] << 8)
2021 | (mask_m[58] << 6) | (mask_m[59] << 4)
2022 | (mask_m[60] << 2) | (mask_m[61] << 0);
2023 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2024 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2026 tmp_mask = (mask_m[31] << 28)
2027 | (mask_m[32] << 26) | (mask_m[33] << 24)
2028 | (mask_m[34] << 22) | (mask_m[35] << 20)
2029 | (mask_m[36] << 18) | (mask_m[37] << 16)
2030 | (mask_m[48] << 14) | (mask_m[39] << 12)
2031 | (mask_m[40] << 10) | (mask_m[41] << 8)
2032 | (mask_m[42] << 6) | (mask_m[43] << 4)
2033 | (mask_m[44] << 2) | (mask_m[45] << 0);
2034 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2035 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2037 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2038 | (mask_m[18] << 26) | (mask_m[18] << 24)
2039 | (mask_m[20] << 22) | (mask_m[20] << 20)
2040 | (mask_m[22] << 18) | (mask_m[22] << 16)
2041 | (mask_m[24] << 14) | (mask_m[24] << 12)
2042 | (mask_m[25] << 10) | (mask_m[26] << 8)
2043 | (mask_m[27] << 6) | (mask_m[28] << 4)
2044 | (mask_m[29] << 2) | (mask_m[30] << 0);
2045 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2046 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2048 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2049 | (mask_m[2] << 26) | (mask_m[3] << 24)
2050 | (mask_m[4] << 22) | (mask_m[5] << 20)
2051 | (mask_m[6] << 18) | (mask_m[7] << 16)
2052 | (mask_m[8] << 14) | (mask_m[9] << 12)
2053 | (mask_m[10] << 10) | (mask_m[11] << 8)
2054 | (mask_m[12] << 6) | (mask_m[13] << 4)
2055 | (mask_m[14] << 2) | (mask_m[15] << 0);
2056 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2057 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2059 tmp_mask = (mask_p[15] << 28)
2060 | (mask_p[14] << 26) | (mask_p[13] << 24)
2061 | (mask_p[12] << 22) | (mask_p[11] << 20)
2062 | (mask_p[10] << 18) | (mask_p[9] << 16)
2063 | (mask_p[8] << 14) | (mask_p[7] << 12)
2064 | (mask_p[6] << 10) | (mask_p[5] << 8)
2065 | (mask_p[4] << 6) | (mask_p[3] << 4)
2066 | (mask_p[2] << 2) | (mask_p[1] << 0);
2067 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2068 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2070 tmp_mask = (mask_p[30] << 28)
2071 | (mask_p[29] << 26) | (mask_p[28] << 24)
2072 | (mask_p[27] << 22) | (mask_p[26] << 20)
2073 | (mask_p[25] << 18) | (mask_p[24] << 16)
2074 | (mask_p[23] << 14) | (mask_p[22] << 12)
2075 | (mask_p[21] << 10) | (mask_p[20] << 8)
2076 | (mask_p[19] << 6) | (mask_p[18] << 4)
2077 | (mask_p[17] << 2) | (mask_p[16] << 0);
2078 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2079 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2081 tmp_mask = (mask_p[45] << 28)
2082 | (mask_p[44] << 26) | (mask_p[43] << 24)
2083 | (mask_p[42] << 22) | (mask_p[41] << 20)
2084 | (mask_p[40] << 18) | (mask_p[39] << 16)
2085 | (mask_p[38] << 14) | (mask_p[37] << 12)
2086 | (mask_p[36] << 10) | (mask_p[35] << 8)
2087 | (mask_p[34] << 6) | (mask_p[33] << 4)
2088 | (mask_p[32] << 2) | (mask_p[31] << 0);
2089 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2090 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2092 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2093 | (mask_p[59] << 26) | (mask_p[58] << 24)
2094 | (mask_p[57] << 22) | (mask_p[56] << 20)
2095 | (mask_p[55] << 18) | (mask_p[54] << 16)
2096 | (mask_p[53] << 14) | (mask_p[52] << 12)
2097 | (mask_p[51] << 10) | (mask_p[50] << 8)
2098 | (mask_p[49] << 6) | (mask_p[48] << 4)
2099 | (mask_p[47] << 2) | (mask_p[46] << 0);
2100 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2101 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2104 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2106 int bb_spur = AR_NO_SPUR;
2109 int spur_delta_phase;
2111 int upper, lower, cur_vit_mask;
2114 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2115 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2117 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2118 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2120 int inc[4] = { 0, 100, 0, 0 };
2127 bool is2GHz = IS_CHAN_2GHZ(chan);
2129 memset(&mask_m, 0, sizeof(int8_t) * 123);
2130 memset(&mask_p, 0, sizeof(int8_t) * 123);
2132 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2133 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2134 if (AR_NO_SPUR == cur_bb_spur)
2136 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2137 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2138 bb_spur = cur_bb_spur;
2143 if (AR_NO_SPUR == bb_spur)
2148 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2149 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2150 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2151 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2152 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2154 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2156 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2157 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2158 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2159 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2160 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2161 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2163 spur_delta_phase = ((bb_spur * 524288) / 100) &
2164 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2166 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2167 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2169 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2170 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2171 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2172 REG_WRITE(ah, AR_PHY_TIMING11, new);
2178 for (i = 0; i < 4; i++) {
2182 for (bp = 0; bp < 30; bp++) {
2183 if ((cur_bin > lower) && (cur_bin < upper)) {
2184 pilot_mask = pilot_mask | 0x1 << bp;
2185 chan_mask = chan_mask | 0x1 << bp;
2190 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2191 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2194 cur_vit_mask = 6100;
2198 for (i = 0; i < 123; i++) {
2199 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2201 /* workaround for gcc bug #37014 */
2202 volatile int tmp_v = abs(cur_vit_mask - bin);
2208 if (cur_vit_mask < 0)
2209 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2211 mask_p[cur_vit_mask / 100] = mask_amt;
2213 cur_vit_mask -= 100;
2216 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2217 | (mask_m[48] << 26) | (mask_m[49] << 24)
2218 | (mask_m[50] << 22) | (mask_m[51] << 20)
2219 | (mask_m[52] << 18) | (mask_m[53] << 16)
2220 | (mask_m[54] << 14) | (mask_m[55] << 12)
2221 | (mask_m[56] << 10) | (mask_m[57] << 8)
2222 | (mask_m[58] << 6) | (mask_m[59] << 4)
2223 | (mask_m[60] << 2) | (mask_m[61] << 0);
2224 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2225 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2227 tmp_mask = (mask_m[31] << 28)
2228 | (mask_m[32] << 26) | (mask_m[33] << 24)
2229 | (mask_m[34] << 22) | (mask_m[35] << 20)
2230 | (mask_m[36] << 18) | (mask_m[37] << 16)
2231 | (mask_m[38] << 14) | (mask_m[39] << 12)
2232 | (mask_m[40] << 10) | (mask_m[41] << 8)
2233 | (mask_m[42] << 6) | (mask_m[43] << 4)
2234 | (mask_m[44] << 2) | (mask_m[45] << 0);
2235 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2236 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2238 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2239 | (mask_m[18] << 26) | (mask_m[18] << 24)
2240 | (mask_m[20] << 22) | (mask_m[20] << 20)
2241 | (mask_m[22] << 18) | (mask_m[22] << 16)
2242 | (mask_m[24] << 14) | (mask_m[24] << 12)
2243 | (mask_m[25] << 10) | (mask_m[26] << 8)
2244 | (mask_m[27] << 6) | (mask_m[28] << 4)
2245 | (mask_m[29] << 2) | (mask_m[30] << 0);
2246 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2247 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2249 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2250 | (mask_m[2] << 26) | (mask_m[3] << 24)
2251 | (mask_m[4] << 22) | (mask_m[5] << 20)
2252 | (mask_m[6] << 18) | (mask_m[7] << 16)
2253 | (mask_m[8] << 14) | (mask_m[9] << 12)
2254 | (mask_m[10] << 10) | (mask_m[11] << 8)
2255 | (mask_m[12] << 6) | (mask_m[13] << 4)
2256 | (mask_m[14] << 2) | (mask_m[15] << 0);
2257 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2258 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2260 tmp_mask = (mask_p[15] << 28)
2261 | (mask_p[14] << 26) | (mask_p[13] << 24)
2262 | (mask_p[12] << 22) | (mask_p[11] << 20)
2263 | (mask_p[10] << 18) | (mask_p[9] << 16)
2264 | (mask_p[8] << 14) | (mask_p[7] << 12)
2265 | (mask_p[6] << 10) | (mask_p[5] << 8)
2266 | (mask_p[4] << 6) | (mask_p[3] << 4)
2267 | (mask_p[2] << 2) | (mask_p[1] << 0);
2268 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2269 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2271 tmp_mask = (mask_p[30] << 28)
2272 | (mask_p[29] << 26) | (mask_p[28] << 24)
2273 | (mask_p[27] << 22) | (mask_p[26] << 20)
2274 | (mask_p[25] << 18) | (mask_p[24] << 16)
2275 | (mask_p[23] << 14) | (mask_p[22] << 12)
2276 | (mask_p[21] << 10) | (mask_p[20] << 8)
2277 | (mask_p[19] << 6) | (mask_p[18] << 4)
2278 | (mask_p[17] << 2) | (mask_p[16] << 0);
2279 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2280 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2282 tmp_mask = (mask_p[45] << 28)
2283 | (mask_p[44] << 26) | (mask_p[43] << 24)
2284 | (mask_p[42] << 22) | (mask_p[41] << 20)
2285 | (mask_p[40] << 18) | (mask_p[39] << 16)
2286 | (mask_p[38] << 14) | (mask_p[37] << 12)
2287 | (mask_p[36] << 10) | (mask_p[35] << 8)
2288 | (mask_p[34] << 6) | (mask_p[33] << 4)
2289 | (mask_p[32] << 2) | (mask_p[31] << 0);
2290 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2291 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2293 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2294 | (mask_p[59] << 26) | (mask_p[58] << 24)
2295 | (mask_p[57] << 22) | (mask_p[56] << 20)
2296 | (mask_p[55] << 18) | (mask_p[54] << 16)
2297 | (mask_p[53] << 14) | (mask_p[52] << 12)
2298 | (mask_p[51] << 10) | (mask_p[50] << 8)
2299 | (mask_p[49] << 6) | (mask_p[48] << 4)
2300 | (mask_p[47] << 2) | (mask_p[46] << 0);
2301 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2302 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
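/*
 * Wire the external rfkill (RF silent) switch GPIO through to the
 * baseband so the radio can be silenced in hardware.
 */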
2305 static void ath9k_enable_rfkill(struct ath_hw *ah)
2307 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2308 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2310 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2311 AR_GPIO_INPUT_MUX2_RFSILENT);
2313 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2314 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2317 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2318 bool bChannelChange)
2320 struct ath_common *common = ath9k_hw_common(ah);
2322 struct ath9k_channel *curchan = ah->curchan;
2326 int i, rx_chainmask, r;
2328 ah->txchainmask = common->tx_chainmask;
2329 ah->rxchainmask = common->rx_chainmask;
2331 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2334 if (curchan && !ah->chip_fullsleep)
2335 ath9k_hw_getnf(ah, curchan);
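/*
 * Attempt a fast channel change when only the channel differs: the
 * chip must not be in full sleep, the channel flags must match, and
 * fast change is not used on AR9280 or on 5 MHz-spaced channels.
 */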
2337 if (bChannelChange &&
2338 !ah->chip_fullsleep &&
2339 (ah->curchan != NULL) &&
2340 (chan->channel != ah->curchan->channel) &&
2341 ((chan->channelFlags & CHANNEL_ALL) ==
2342 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2343 !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2344 IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2346 if (ath9k_hw_channel_change(ah, chan)) {
2347 ath9k_hw_loadnf(ah, ah->curchan);
2348 ath9k_hw_start_nfcal(ah);
2353 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2354 if (saveDefAntenna == 0)
2357 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2359 /* For chips on which RTC reset is done, save TSF before it gets cleared */
2360 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2361 tsf = ath9k_hw_gettsf64(ah);
2363 saveLedState = REG_READ(ah, AR_CFG_LED) &
2364 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2365 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2367 ath9k_hw_mark_phy_inactive(ah);
2369 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2371 AR9271_RESET_POWER_DOWN_CONTROL,
2372 AR9271_RADIO_RF_RST);
2376 if (!ath9k_hw_chip_reset(ah, chan)) {
2377 ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n");
2381 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2382 ah->htc_reset_init = false;
2384 AR9271_RESET_POWER_DOWN_CONTROL,
2385 AR9271_GATE_MAC_CTL);
2390 if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2391 ath9k_hw_settsf64(ah, tsf);
2393 if (AR_SREV_9280_10_OR_LATER(ah))
2394 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2396 if (AR_SREV_9287_12_OR_LATER(ah)) {
2397 /* Enable ASYNC FIFO */
2398 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2399 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2400 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2401 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2402 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2403 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2404 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2406 r = ath9k_hw_process_ini(ah, chan);
2410 /* Setup MFP options for CCMP */
2411 if (AR_SREV_9280_20_OR_LATER(ah)) {
2412 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2413 * frames when constructing CCMP AAD. */
2414 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2416 ah->sw_mgmt_crypto = false;
2417 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2418 /* Disable hardware crypto for management frames */
2419 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2420 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2421 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2422 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2423 ah->sw_mgmt_crypto = true;
2425 ah->sw_mgmt_crypto = true;
2427 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2428 ath9k_hw_set_delta_slope(ah, chan);
2430 if (AR_SREV_9280_10_OR_LATER(ah))
2431 ath9k_hw_9280_spur_mitigate(ah, chan);
2433 ath9k_hw_spur_mitigate(ah, chan);
2435 ah->eep_ops->set_board_values(ah, chan);
2437 ath9k_hw_decrease_chain_power(ah, chan);
2439 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
2440 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
2442 | AR_STA_ID1_RTS_USE_DEF
2444 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2445 | ah->sta_id1_defaults);
2446 ath9k_hw_set_operating_mode(ah, ah->opmode);
2448 ath_hw_setbssidmask(common);
2450 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2452 ath9k_hw_write_associd(ah);
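/* Clear any pending interrupt status. */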
2454 REG_WRITE(ah, AR_ISR, ~0);
2456 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2458 if (AR_SREV_9280_10_OR_LATER(ah))
2459 ath9k_hw_ar9280_set_channel(ah, chan);
2461 if (!(ath9k_hw_set_channel(ah, chan)))
2464 for (i = 0; i < AR_NUM_DCU; i++)
2465 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
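/* Re-program every hardware TX queue with its stored configuration. */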
2468 for (i = 0; i < ah->caps.total_queues; i++)
2469 ath9k_hw_resettxqueue(ah, i);
2471 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2472 ath9k_hw_init_qos(ah);
2474 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2475 ath9k_enable_rfkill(ah);
2477 ath9k_hw_init_user_settings(ah);
2479 if (AR_SREV_9287_12_OR_LATER(ah)) {
2480 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2481 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2482 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2483 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2484 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2485 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2487 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2488 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2490 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2491 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2492 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2493 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2495 if (AR_SREV_9287_12_OR_LATER(ah)) {
2496 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2497 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2500 REG_WRITE(ah, AR_STA_ID1,
2501 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2503 ath9k_hw_set_dma(ah);
2505 REG_WRITE(ah, AR_OBS, 8);
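/*
 * If interrupt mitigation is enabled, program the RX mitigation
 * timers so bursts of received frames are coalesced into fewer
 * interrupts.
 */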
2507 if (ah->config.intr_mitigation) {
2508 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2509 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
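/* Activate the baseband and run the initial calibrations. */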
2512 ath9k_hw_init_bb(ah, chan);
2514 if (!ath9k_hw_init_cal(ah, chan))
2517 rx_chainmask = ah->rxchainmask;
2518 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2519 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2520 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2523 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2526 * For big-endian systems, turn on swapping for descriptors
2528 if (AR_SREV_9100(ah)) {
2530 mask = REG_READ(ah, AR_CFG);
2531 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2532 ath_print(common, ATH_DBG_RESET,
2533 "CFG Byte Swap Set 0x%x\n", mask);
2536 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2537 REG_WRITE(ah, AR_CFG, mask);
2538 ath_print(common, ATH_DBG_RESET,
2539 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2542 /* Configure AR9271 target WLAN */
2543 if (AR_SREV_9271(ah))
2544 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2547 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2551 if (ah->btcoex_hw.enabled)
2552 ath9k_hw_btcoex_enable(ah);
2557 /************************/
2558 /* Key Cache Management */
2559 /************************/
2561 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2565 if (entry >= ah->caps.keycache_size) {
2566 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2567 "keychache entry %u out of range\n", entry);
2571 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2573 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2574 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2575 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2576 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2577 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2578 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2579 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2580 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
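/*
 * TKIP uses a second key cache entry, 64 slots above the main one,
 * for the Michael MIC key; clear that entry as well.
 */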
2582 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2583 u16 micentry = entry + 64;
2585 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2586 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2587 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2588 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2595 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2599 if (entry >= ah->caps.keycache_size) {
2600 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2601 "keychache entry %u out of range\n", entry);
2606 macHi = (mac[5] << 8) | mac[4];
2607 macLo = (mac[3] << 24) |
2612 macLo |= (macHi & 1) << 31;
2617 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2618 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2623 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2624 const struct ath9k_keyval *k,
2627 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2628 struct ath_common *common = ath9k_hw_common(ah);
2629 u32 key0, key1, key2, key3, key4;
2632 if (entry >= pCap->keycache_size) {
2633 ath_print(common, ATH_DBG_FATAL,
2634 "keycache entry %u out of range\n", entry);
2638 switch (k->kv_type) {
2639 case ATH9K_CIPHER_AES_OCB:
2640 keyType = AR_KEYTABLE_TYPE_AES;
2642 case ATH9K_CIPHER_AES_CCM:
2643 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2644 ath_print(common, ATH_DBG_ANY,
2645 "AES-CCM not supported by mac rev 0x%x\n",
2646 ah->hw_version.macRev);
2649 keyType = AR_KEYTABLE_TYPE_CCM;
2651 case ATH9K_CIPHER_TKIP:
2652 keyType = AR_KEYTABLE_TYPE_TKIP;
2653 if (ATH9K_IS_MIC_ENABLED(ah)
2654 && entry + 64 >= pCap->keycache_size) {
2655 ath_print(common, ATH_DBG_ANY,
2656 "entry %u inappropriate for TKIP\n", entry);
2660 case ATH9K_CIPHER_WEP:
2661 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2662 ath_print(common, ATH_DBG_ANY,
2663 "WEP key length %u too small\n", k->kv_len);
2666 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2667 keyType = AR_KEYTABLE_TYPE_40;
2668 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2669 keyType = AR_KEYTABLE_TYPE_104;
2671 keyType = AR_KEYTABLE_TYPE_128;
2673 case ATH9K_CIPHER_CLR:
2674 keyType = AR_KEYTABLE_TYPE_CLR;
2677 ath_print(common, ATH_DBG_FATAL,
2678 "cipher %u not supported\n", k->kv_type);
2682 key0 = get_unaligned_le32(k->kv_val + 0);
2683 key1 = get_unaligned_le16(k->kv_val + 4);
2684 key2 = get_unaligned_le32(k->kv_val + 6);
2685 key3 = get_unaligned_le16(k->kv_val + 10);
2686 key4 = get_unaligned_le32(k->kv_val + 12);
2687 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2691 * Note: Key cache registers access special memory area that requires
2692 * two 32-bit writes to actually update the values in the internal
2693 * memory. Consequently, the exact order and pairs used here must be
2697 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2698 u16 micentry = entry + 64;
2701 * Write inverted key[47:0] first to avoid Michael MIC errors
2702 * on frames that could be sent or received at the same time.
2703 * The correct key will be written in the end once everything
2706 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2707 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2709 /* Write key[95:48] */
2710 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2711 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2713 /* Write key[127:96] and key type */
2714 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2715 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2717 /* Write MAC address for the entry */
2718 (void) ath9k_hw_keysetmac(ah, entry, mac);
2720 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2722 * TKIP uses two key cache entries:
2723 * Michael MIC TX/RX keys in the same key cache entry
2724 * (idx = main index + 64):
2725 * key0 [31:0] = RX key [31:0]
2726 * key1 [15:0] = TX key [31:16]
2727 * key1 [31:16] = reserved
2728 * key2 [31:0] = RX key [63:32]
2729 * key3 [15:0] = TX key [15:0]
2730 * key3 [31:16] = reserved
2731 * key4 [31:0] = TX key [63:32]
2733 u32 mic0, mic1, mic2, mic3, mic4;
2735 mic0 = get_unaligned_le32(k->kv_mic + 0);
2736 mic2 = get_unaligned_le32(k->kv_mic + 4);
2737 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2738 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2739 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2741 /* Write RX[31:0] and TX[31:16] */
2742 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2743 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2745 /* Write RX[63:32] and TX[15:0] */
2746 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2747 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2749 /* Write TX[63:32] and keyType(reserved) */
2750 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2751 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2752 AR_KEYTABLE_TYPE_CLR);
2756 * TKIP uses four key cache entries (two for group
2758 * Michael MIC TX/RX keys are in different key cache
2759 * entries (idx = main index + 64 for TX and
2760 * main index + 32 + 96 for RX):
2761 * key0 [31:0] = TX/RX MIC key [31:0]
2762 * key1 [31:0] = reserved
2763 * key2 [31:0] = TX/RX MIC key [63:32]
2764 * key3 [31:0] = reserved
2765 * key4 [31:0] = reserved
2767 * Upper layer code will call this function separately
2768 * for TX and RX keys when these registers offsets are
2773 mic0 = get_unaligned_le32(k->kv_mic + 0);
2774 mic2 = get_unaligned_le32(k->kv_mic + 4);
2776 /* Write MIC key[31:0] */
2777 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2778 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2780 /* Write MIC key[63:32] */
2781 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2782 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2784 /* Write TX[63:32] and keyType(reserved) */
2785 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2786 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2787 AR_KEYTABLE_TYPE_CLR);
2790 /* MAC address registers are reserved for the MIC entry */
2791 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2792 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2795 * Write the correct (un-inverted) key[47:0] last to enable
2796 * TKIP now that all other registers are set with correct
2799 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2800 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2802 /* Write key[47:0] */
2803 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2804 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2806 /* Write key[95:48] */
2807 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2808 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2810 /* Write key[127:96] and key type */
2811 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2812 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2814 /* Write MAC address for the entry */
2815 (void) ath9k_hw_keysetmac(ah, entry, mac);
2821 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2823 if (entry < ah->caps.keycache_size) {
2824 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2825 if (val & AR_KEYTABLE_VALID)
2831 /******************************/
2832 /* Power Management (Chipset) */
2833 /******************************/
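/*
 * Full sleep: set the MAC power-save bit, drop the RTC force-wake and,
 * on non-AR9100 parts, reset the AHB/host interface so the chip can
 * power down.
 */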
2835 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2837 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2839 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2840 AR_RTC_FORCE_WAKE_EN);
2841 if (!AR_SREV_9100(ah))
2842 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2844 REG_CLR_BIT(ah, (AR_RTC_RESET),
2849 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2851 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2853 struct ath9k_hw_capabilities *pCap = &ah->caps;
2855 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2856 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2857 AR_RTC_FORCE_WAKE_ON_INT);
2859 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2860 AR_RTC_FORCE_WAKE_EN);
2865 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2871 if ((REG_READ(ah, AR_RTC_STATUS) &
2872 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2873 if (!ath9k_hw_set_reset_reg(ah,
2874 ATH9K_RESET_POWER_ON)) {
2878 if (AR_SREV_9100(ah))
2879 REG_SET_BIT(ah, AR_RTC_RESET,
2882 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2883 AR_RTC_FORCE_WAKE_EN);
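/*
 * Poll until the RTC power state machine reports ON, re-asserting
 * the force-wake bit between polls.
 */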
2886 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2887 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2888 if (val == AR_RTC_STATUS_ON)
2891 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2892 AR_RTC_FORCE_WAKE_EN);
2895 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2896 "Failed to wakeup in %uus\n",
2897 POWER_UP_TIME);
2902 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2907 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2909 struct ath_common *common = ath9k_hw_common(ah);
2910 int status = true, setChip = true;
2911 static const char *modes[] = {
2918 if (ah->power_mode == mode)
2921 ath_print(common, ATH_DBG_RESET, "%s -> %s\n",
2922 modes[ah->power_mode], modes[mode]);
2925 case ATH9K_PM_AWAKE:
2926 status = ath9k_hw_set_power_awake(ah, setChip);
2928 case ATH9K_PM_FULL_SLEEP:
2929 ath9k_set_power_sleep(ah, setChip);
2930 ah->chip_fullsleep = true;
2932 case ATH9K_PM_NETWORK_SLEEP:
2933 ath9k_set_power_network_sleep(ah, setChip);
2936 ath_print(common, ATH_DBG_FATAL,
2937 "Unknown power mode %u\n", mode);
2940 ah->power_mode = mode;
2946 * Helper for ASPM support.
2948 * Disable PLL when in L0s as well as receiver clock when in L1.
2949 * This power saving option must be enabled through the SerDes.
2951 * Programming the SerDes must go through the same 288 bit serial shift
2952 * register as the other analog registers. Hence the 9 writes.
2954 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
2959 if (!ah->is_pciexpress)
2962 /* Do not touch SerDes registers */
2963 if (ah->config.pcie_powersave_enable == 2)
2966 /* Nothing to do on restore for 11N */
2968 if (AR_SREV_9280_20_OR_LATER(ah)) {
2970 * AR9280 2.0 or later chips use SerDes values from the
2971 * initvals.h, initialized per chipset during hardware init.
2974 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
2975 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
2976 INI_RA(&ah->iniPcieSerdes, i, 1));
2978 } else if (AR_SREV_9280(ah) &&
2979 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
2980 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
2981 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
2983 /* RX shut off when elecidle is asserted */
2984 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
2985 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
2986 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
2988 /* Shut off CLKREQ active in L1 */
2989 if (ah->config.pcie_clock_req)
2990 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
2992 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
2994 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
2995 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
2996 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
2998 /* Load the new settings */
2999 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3002 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3003 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3005 /* RX shut off when elecidle is asserted */
3006 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3007 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3008 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3011 * Ignore ah->config.pcie_clock_req setting for
3014 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3016 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3017 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3018 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3020 /* Load the new settings */
3021 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3026 /* set bit 19 to allow forcing of pcie core into L1 state */
3027 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3029 /* Several PCIe workarounds to ensure proper behaviour */
3030 if (ah->config.pcie_waen) {
3031 val = ah->config.pcie_waen;
3033 val &= (~AR_WA_D3_L1_DISABLE);
3035 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3037 val = AR9285_WA_DEFAULT;
3039 val &= (~AR_WA_D3_L1_DISABLE);
3040 } else if (AR_SREV_9280(ah)) {
3042 * On AR9280 chips bit 22 of 0x4004 needs to be
3043 * set, otherwise the card may disappear.
3045 val = AR9280_WA_DEFAULT;
3047 val &= (~AR_WA_D3_L1_DISABLE);
3049 val = AR_WA_DEFAULT;
3052 REG_WRITE(ah, AR_WA, val);