Thomas Gleixner | d2912cb | 2019-06-04 10:11:33 +0200 | [diff] [blame^] | 1 | // SPDX-License-Identifier: GPL-2.0-only |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 2 | /* |
| 3 | * Copyright (C) 2011-2012 Synopsys (www.synopsys.com) |
| 4 | * |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 5 | * vineetg : May 2011 |
| 6 | * -Adapted (from .26 to .35) |
| 7 | * -original contribution by Tim.yao@amlogic.com |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 8 | */ |
| 9 | |
| 10 | #include <linux/types.h> |
Vineet Gupta | ceed97a | 2014-10-02 12:30:42 +0530 | [diff] [blame] | 11 | #include <linux/perf_event.h> |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 12 | #include <linux/ptrace.h> |
| 13 | #include <linux/uaccess.h> |
| 14 | #include <asm/disasm.h> |
| 15 | |
/*
 * Endianness helpers: the emulation below assembles/stores values one
 * byte at a time, lowest address first.  On big-endian builds the value
 * is byte-reversed up front (swap/swape insns) so the byte-wise store
 * loops can always emit the low-address byte from the bottom bits.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define BE 1
/* reorder the two significant bytes of a 16-bit value */
#define FIRST_BYTE_16 "swap %1, %1\n swape %1, %1\n"
/* byte-reverse a 32-bit value */
#define FIRST_BYTE_32 "swape %1, %1\n"
#else
#define BE 0
#define FIRST_BYTE_16
#define FIRST_BYTE_32
#endif
| 25 | |
/*
 * Load one byte from user address @addr into @val, post-incrementing
 * @addr (ldb.ab).  The access is covered by an __ex_table entry: if it
 * faults, the fixup at label 3 sets @err to 1 and execution resumes at
 * label 2 (@val is then undefined).  @err must be pre-initialized by the
 * caller; it is only ever set, never cleared, so errors accumulate
 * across successive invocations.
 */
#define __get8_unaligned_check(val, addr, err)		\
	__asm__(					\
	"1:	ldb.ab	%1, [%2, 1]\n"			\
	"2:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"3:	mov	%0, 1\n"			\
	"	j	2b\n"				\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.align 4\n"				\
	"	.long	1b, 3b\n"			\
	"	.previous\n"				\
	: "=r" (err), "=&r" (val), "=r" (addr)		\
	: "0" (err), "2" (addr))
| 41 | |
/*
 * Read a 16-bit value from the possibly-unaligned @addr one byte at a
 * time and assemble it into @val, placing each byte according to
 * endianness (low-address byte is the MSB on big-endian).
 * Must be used inside a function that provides a local "fault:" label;
 * jumps there if either byte access faulted.
 */
#define get16_unaligned_check(val, addr)		\
	do {						\
		unsigned int err = 0, v, a = addr;	\
		__get8_unaligned_check(v, a, err);	\
		val =  v << ((BE) ? 8 : 0);		\
		__get8_unaligned_check(v, a, err);	\
		val |= v << ((BE) ? 0 : 8);		\
		if (err)				\
			goto fault;			\
	} while (0)
| 52 | |
/*
 * Read a 32-bit value from the possibly-unaligned @addr one byte at a
 * time and assemble it into @val, placing each byte according to
 * endianness.  Must be used inside a function that provides a local
 * "fault:" label; jumps there if any of the four accesses faulted.
 */
#define get32_unaligned_check(val, addr)		\
	do {						\
		unsigned int err = 0, v, a = addr;	\
		__get8_unaligned_check(v, a, err);	\
		val =  v << ((BE) ? 24 : 0);		\
		__get8_unaligned_check(v, a, err);	\
		val |= v << ((BE) ? 16 : 8);		\
		__get8_unaligned_check(v, a, err);	\
		val |= v << ((BE) ? 8 : 16);		\
		__get8_unaligned_check(v, a, err);	\
		val |= v << ((BE) ? 0 : 24);		\
		if (err)				\
			goto fault;			\
	} while (0)
| 67 | |
/*
 * Store the 16-bit value @val to the possibly-unaligned @addr one byte
 * at a time, lowest address first.  On big-endian, FIRST_BYTE_16 first
 * byte-reverses the value so the low-address byte sits in the bottom
 * bits.  Both stores carry __ex_table fixups; on a fault the shared
 * handler sets err and we jump to the caller's local "fault:" label.
 */
#define put16_unaligned_check(val, addr)		\
	do {						\
		unsigned int err = 0, v = val, a = addr;\
							\
		__asm__(				\
		FIRST_BYTE_16				\
		"1:	stb.ab	%1, [%2, 1]\n"		\
		"	lsr	%1, %1, 8\n"		\
		"2:	stb	%1, [%2]\n"		\
		"3:\n"					\
		"	.section .fixup,\"ax\"\n"	\
		"	.align 4\n"			\
		"4:	mov	%0, 1\n"		\
		"	j	3b\n"			\
		"	.previous\n"			\
		"	.section __ex_table,\"a\"\n"	\
		"	.align 4\n"			\
		"	.long	1b, 4b\n"		\
		"	.long	2b, 4b\n"		\
		"	.previous\n"			\
		: "=r" (err), "=&r" (v), "=&r" (a)	\
		: "0" (err), "1" (v), "2" (a));		\
							\
		if (err)				\
			goto fault;			\
	} while (0)
| 94 | |
/*
 * Store the 32-bit value @val to the possibly-unaligned @addr one byte
 * at a time, lowest address first.  On big-endian, FIRST_BYTE_32
 * byte-reverses the value first (see FIRST_BYTE_16 comment).  All four
 * stores carry __ex_table fixups; on a fault the shared handler sets
 * err and we jump to the caller's local "fault:" label.
 */
#define put32_unaligned_check(val, addr)		\
	do {						\
		unsigned int err = 0, v = val, a = addr;\
							\
		__asm__(				\
		FIRST_BYTE_32				\
		"1:	stb.ab	%1, [%2, 1]\n"		\
		"	lsr	%1, %1, 8\n"		\
		"2:	stb.ab	%1, [%2, 1]\n"		\
		"	lsr	%1, %1, 8\n"		\
		"3:	stb.ab	%1, [%2, 1]\n"		\
		"	lsr	%1, %1, 8\n"		\
		"4:	stb	%1, [%2]\n"		\
		"5:\n"					\
		"	.section .fixup,\"ax\"\n"	\
		"	.align 4\n"			\
		"6:	mov	%0, 1\n"		\
		"	j	5b\n"			\
		"	.previous\n"			\
		"	.section __ex_table,\"a\"\n"	\
		"	.align 4\n"			\
		"	.long	1b, 6b\n"		\
		"	.long	2b, 6b\n"		\
		"	.long	3b, 6b\n"		\
		"	.long	4b, 6b\n"		\
		"	.previous\n"			\
		: "=r" (err), "=&r" (v), "=&r" (a)	\
		: "0" (err), "1" (v), "2" (a));		\
							\
		if (err)				\
			goto fault;			\
	} while (0)
| 127 | |
/*
 * sysctl hooks:
 * unaligned_enabled     - master on/off switch for the emulation
 * no_unaligned_warning  - when set, warn only once instead of per access
 *   (presumably toggled via /proc/sys/kernel/ignore-unaligned-usertrap,
 *    per the hint printed in misaligned_fixup() — confirm against the
 *    sysctl table, which is outside this file)
 */
int unaligned_enabled __read_mostly = 1;	/* Enabled by default */
int no_unaligned_warning __read_mostly = 1;	/* Only 1 warning by default */
| 131 | |
/*
 * Emulate an unaligned load.
 * Operands (per disasm_state): src1 = base register value, src2 = offset,
 * dest = destination register.  zz selects the access size (0 = 32-bit,
 * otherwise 16-bit here), x requests sign extension, pref marks a
 * prefetch-style form that writes no destination (presumably PF/LD with
 * dest discarded — confirm against the ARC disassembler in disasm.c).
 * On any faulting user access state->fault is set for the caller.
 */
static void fixup_load(struct disasm_state *state, struct pt_regs *regs,
			struct callee_regs *cregs)
{
	int val;

	/* register write back */
	if ((state->aa == 1) || (state->aa == 2)) {
		/* .aw/.ab modes: base register <- base + offset */
		set_reg(state->wb_reg, state->src1 + state->src2, regs, cregs);

		/*
		 * .ab (post-update): the effective address is the
		 * un-incremented base, so drop the offset below.
		 */
		if (state->aa == 2)
			state->src2 = 0;
	}

	if (state->zz == 0) {
		get32_unaligned_check(val, state->src1 + state->src2);
	} else {
		get16_unaligned_check(val, state->src1 + state->src2);

		/* shift pair sign-extends the 16-bit value into int */
		if (state->x)
			val = (val << 16) >> 16;
	}

	/* prefetch forms have no destination to write */
	if (state->pref == 0)
		set_reg(state->dest, val, regs, cregs);

	return;

fault:	state->fault = 1;	/* reached via the get*_unaligned_check macros */
}
| 161 | |
| 162 | static void fixup_store(struct disasm_state *state, struct pt_regs *regs, |
| 163 | struct callee_regs *cregs) |
| 164 | { |
| 165 | /* register write back */ |
| 166 | if ((state->aa == 1) || (state->aa == 2)) { |
| 167 | set_reg(state->wb_reg, state->src2 + state->src3, regs, cregs); |
| 168 | |
| 169 | if (state->aa == 3) |
| 170 | state->src3 = 0; |
| 171 | } else if (state->aa == 3) { |
| 172 | if (state->zz == 2) { |
| 173 | set_reg(state->wb_reg, state->src2 + (state->src3 << 1), |
| 174 | regs, cregs); |
| 175 | } else if (!state->zz) { |
| 176 | set_reg(state->wb_reg, state->src2 + (state->src3 << 2), |
| 177 | regs, cregs); |
| 178 | } else { |
| 179 | goto fault; |
| 180 | } |
| 181 | } |
| 182 | |
| 183 | /* write fix-up */ |
| 184 | if (!state->zz) |
| 185 | put32_unaligned_check(state->src1, state->src2 + state->src3); |
| 186 | else |
| 187 | put16_unaligned_check(state->src1, state->src2 + state->src3); |
| 188 | |
| 189 | return; |
| 190 | |
| 191 | fault: state->fault = 1; |
| 192 | } |
| 193 | |
| 194 | /* |
| 195 | * Handle an unaligned access |
| 196 | * Returns 0 if successfully handled, 1 if some error happened |
| 197 | */ |
| 198 | int misaligned_fixup(unsigned long address, struct pt_regs *regs, |
Vineet Gupta | 38a9ff6 | 2013-06-12 15:13:40 +0530 | [diff] [blame] | 199 | struct callee_regs *cregs) |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 200 | { |
| 201 | struct disasm_state state; |
| 202 | char buf[TASK_COMM_LEN]; |
| 203 | |
| 204 | /* handle user mode only and only if enabled by sysadmin */ |
| 205 | if (!user_mode(regs) || !unaligned_enabled) |
| 206 | return 1; |
| 207 | |
| 208 | if (no_unaligned_warning) { |
| 209 | pr_warn_once("%s(%d) made unaligned access which was emulated" |
| 210 | " by kernel assist\n. This can degrade application" |
| 211 | " performance significantly\n. To enable further" |
| 212 | " logging of such instances, please \n" |
| 213 | " echo 0 > /proc/sys/kernel/ignore-unaligned-usertrap\n", |
| 214 | get_task_comm(buf, current), task_pid_nr(current)); |
| 215 | } else { |
| 216 | /* Add rate limiting if it gets down to it */ |
| 217 | pr_warn("%s(%d): unaligned access to/from 0x%lx by PC: 0x%lx\n", |
| 218 | get_task_comm(buf, current), task_pid_nr(current), |
| 219 | address, regs->ret); |
| 220 | |
| 221 | } |
| 222 | |
| 223 | disasm_instr(regs->ret, &state, 1, regs, cregs); |
| 224 | |
| 225 | if (state.fault) |
| 226 | goto fault; |
| 227 | |
| 228 | /* ldb/stb should not have unaligned exception */ |
| 229 | if ((state.zz == 1) || (state.di)) |
| 230 | goto fault; |
| 231 | |
| 232 | if (!state.write) |
| 233 | fixup_load(&state, regs, cregs); |
| 234 | else |
| 235 | fixup_store(&state, regs, cregs); |
| 236 | |
| 237 | if (state.fault) |
| 238 | goto fault; |
| 239 | |
Vineet Gupta | 9aed02f | 2017-01-27 10:45:27 -0800 | [diff] [blame] | 240 | /* clear any remanants of delay slot */ |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 241 | if (delay_mode(regs)) { |
Vineet Gupta | a524c21 | 2017-02-07 09:44:58 -0800 | [diff] [blame] | 242 | regs->ret = regs->bta & ~1U; |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 243 | regs->status32 &= ~STATUS_DE_MASK; |
| 244 | } else { |
| 245 | regs->ret += state.instr_len; |
Mischa Jonker | c11eb22 | 2013-09-26 15:44:56 +0200 | [diff] [blame] | 246 | |
| 247 | /* handle zero-overhead-loop */ |
| 248 | if ((regs->ret == regs->lp_end) && (regs->lp_count)) { |
| 249 | regs->ret = regs->lp_start; |
| 250 | regs->lp_count--; |
| 251 | } |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 252 | } |
| 253 | |
Vineet Gupta | ceed97a | 2014-10-02 12:30:42 +0530 | [diff] [blame] | 254 | perf_sw_event(PERF_COUNT_SW_ALIGNMENT_FAULTS, 1, regs, address); |
Vineet Gupta | 2e651ea | 2013-01-23 16:30:36 +0530 | [diff] [blame] | 255 | return 0; |
| 256 | |
| 257 | fault: |
| 258 | pr_err("Alignment trap: fault in fix-up %08lx at [<%08lx>]\n", |
| 259 | state.words[0], address); |
| 260 | |
| 261 | return 1; |
| 262 | } |