/*
* arch/arm/kernel/kprobes-decode.c
*
* Copyright (C) 2006, 2007 Motorola Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*/
/*
* We do not have hardware single-stepping on ARM.  This
* effort is further complicated by the ARM not having a
* "next PC" register. Instructions that change the PC
* can't be safely single-stepped in an MP environment, so
* we have a lot of work to do:
*
* In the prepare phase:
* *) If it is an instruction that does anything
* with the CPU mode, we reject it for a kprobe.
* (This is out of laziness rather than need. The
* instructions could be simulated.)
*
* *) Otherwise, decode the instruction, rewriting its
* registers to use fixed, ordered registers, and set a
* handler for it that runs the instruction.
*
* In the execution phase by an instruction's handler:
*
* *) If the PC is written to by the instruction, the
* instruction must be fully simulated in software.
* If it is a conditional instruction, insn[0] holds
* "mov r0, #1" carrying the instruction's own condition
* code (see truecc_insn() below) and insn[1] holds a
* "mov pc, lr" to return; the handler runs the slot and
* checks r0 to see whether the condition passed.
*
* *) Otherwise, a modified form of the instruction is
* directly executed. Its handler calls the
* instruction in insn[0]. In insn[1] is a
* "mov pc, lr" to return.
*
* Before calling, load up the reordered registers
* from the original instruction's registers. If one
* of the original input registers is the PC, compute
* and adjust the appropriate input register.
*
* After the call completes, copy the output registers
* back to the original instruction's registers.
*
* We don't use a real breakpoint instruction since, in
* the kernel, that would take us from SVC mode back to
* SVC mode and clobber the banked link register.
* Instead we use an
* undefined instruction. To simplify processing, the
* undefined instruction used for kprobes must be reserved
* exclusively for kprobes use.
*
* TODO: ifdef out some instruction decoding based on architecture.
*/
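/*
 * Worked example (illustrative only; the exact slot contents are set
 * up by the decoding code, which is not shown here): for a probed
 * "add r5, r6, r7" (0xe0865007), the prepare phase could leave
 *
 *	insn[0] = 0xe0810002	@ add r0, r1, r2  (registers renumbered)
 *	insn[1] = 0xe1a0f00e	@ mov pc, lr  (KPROBE_RETURN_INSTRUCTION)
 *
 * The handler loads r1/r2 from the saved r6/r7, calls the slot through
 * one of the insnslot_*() helpers below, and writes the r0 result back
 * to the saved r5.
 */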
#include <linux/kernel.h>
#include <linux/kprobes.h>
#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
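/*
 * Worked example of the arithmetic above (values chosen for
 * illustration): a branch-to-self "b ." is 0xeafffffe, so
 * ((insn) & 0xffffff) << 2 == 0x03fffff8; bit 25 is set, so
 * sign_extend() ORs in -(1 << 25) and branch_displacement()
 * returns -8, giving target = pc (branch address + 8) - 8, i.e.
 * the branch instruction itself.
 */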
#define PSR_fs (PSR_f|PSR_s)
#define KPROBE_RETURN_INSTRUCTION 0xe1a0f00e /* mov pc, lr */
#define SET_R0_TRUE_INSTRUCTION 0xe3a00001 /* mov r0, #1 */
#define truecc_insn(insn) (((insn) & 0xf0000000) | \
(SET_R0_TRUE_INSTRUCTION & 0x0fffffff))
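/*
 * For example (illustrative): for a probed "addne r5, r6, r7" the
 * condition field is 0x1 (NE), so truecc_insn() yields 0x13a00001,
 * i.e. "movne r0, #1".  Run from the slot, it sets r0 only when the
 * probed instruction's condition passes, which is how the simulation
 * handlers test the condition code.
 */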
typedef long (insn_0arg_fn_t)(void);
typedef long (insn_1arg_fn_t)(long);
typedef long (insn_2arg_fn_t)(long, long);
typedef long (insn_3arg_fn_t)(long, long, long);
typedef long (insn_4arg_fn_t)(long, long, long, long);
typedef long long (insn_llret_0arg_fn_t)(void);
typedef long long (insn_llret_3arg_fn_t)(long, long, long);
typedef long long (insn_llret_4arg_fn_t)(long, long, long, long);
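/*
 * Packs the r0/r1 value pair returned by the insnslot_llret_*()
 * helpers into one long long.  The member order is swapped on
 * big-endian so that r0 is always the low word of dr.
 */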
union reg_pair {
long long dr;
#ifdef __LITTLE_ENDIAN
struct { long r0, r1; };
#else
struct { long r1, r0; };
#endif
};
/*
* For STR and STM instructions, an ARM core may choose to use either
* a +8 or a +12 displacement from the current instruction's address.
* Whichever value is chosen for a given core, it must be the same for
* both instructions and may not change. This function measures it.
*/
static int str_pc_offset;
static void __init find_str_pc_offset(void)
{
int addr, scratch, ret;
__asm__ (
"sub %[ret], pc, #4 \n\t"
"str pc, %[addr] \n\t"
"ldr %[scr], %[addr] \n\t"
"sub %[ret], %[scr], %[ret] \n\t"
: [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));
str_pc_offset = ret;
}
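/*
 * Sketch of the intended use (the emulation handlers appear later in
 * this file and are not shown here): when a store's source register is
 * the PC, the handler substitutes the probed instruction's address
 * plus str_pc_offset for the stale PC value before running the slot.
 */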
/*
* The insnslot_?arg_r[w]flags() functions below are to keep the
* msr -> *fn -> mrs instruction sequences indivisible so that
* the state of the CPSR flags isn't inadvertently modified
* just before or just after the call.
*/
static inline long __kprobes
insnslot_0arg_rflags(long cpsr, insn_0arg_fn_t *fn)
{
register long ret asm("r0");
__asm__ __volatile__ (
"msr cpsr_fs, %[cpsr] \n\t"
"mov lr, pc \n\t"
"mov pc, %[fn] \n\t"
: "=r" (ret)
: [cpsr] "r" (cpsr), [fn] "r" (fn)
: "lr", "cc"
);
return ret;
}
static inline long long __kprobes
insnslot_llret_0arg_rflags(long cpsr, insn_llret_0arg_fn_t *fn)
{
register long ret0 asm("r0");
register long ret1 asm("r1");
union reg_pair fnr;
__asm__ __volatile__ (
"msr cpsr_fs, %[cpsr] \n\t"
"mov lr, pc \n\t"
"mov pc, %[fn] \n\t"
: "=r" (ret0), "=r" (ret1)
: [cpsr] "r" (cpsr), [fn] "r" (fn)
: "lr", "cc"
);
fnr.r0 = ret0;
fnr.r1 = ret1;
return fnr.dr;
}
static inline long __kprobes
insnslot_1arg_rflags(long r0, long cpsr, insn_1arg_fn_t *fn)
{
register long rr0 asm("r0") = r0;
register long ret asm("r0");
__asm__ __volatile__ (
"msr cpsr_fs, %[cpsr] \n\t"
"mov lr, pc \n\t"
"mov pc, %[fn] \n\t"
: "=r" (ret)
: "0" (rr0), [cpsr] "r" (cpsr), [fn] "r" (fn)
: "lr", "cc"
);
return ret;
}
static inline long __kprobes
insnslot_2arg_rflags(long r0, long r1, long cpsr, insn_2arg_fn_t *fn)
{
register long rr0 asm("r0") = r0;
register long rr1 asm("r1") = r1;
register long ret asm("r0");
__asm__ __volatile__ (
"msr cpsr_fs, %[cpsr] \n\t"
"mov lr, pc \n\t"
"mov pc, %[fn] \n\t"
: "=r" (ret)
: "0" (rr0), "r" (rr1),
[cpsr] "r" (cpsr), [fn] "r" (fn)
: "lr", "cc"
);
return ret;
}
static inline long __kprobes
insnslot_3arg_rflags(long r0, long r1, long r2, long cpsr, insn_3arg_fn_t *fn)
{
register long rr0 asm("r0") = r0;
register long rr1 asm("r1") = r1;
register long rr2 asm("r2") = r2;
register long ret asm("r0");
__asm__ __volatile__ (
"msr cpsr_fs, %[cpsr] \n\t"
"mov lr, pc \n\t"
"mov pc, %[fn] \n\t"
: "=r" (ret)
: "0" (rr0), "r" (rr1), "r" (rr2),
[cpsr] "r" (cpsr), [fn] "r" (fn)
: "lr", "cc"
);
return ret;
}
static inline long long __kprobes
insnslot_llret_3arg_rflags(long r0, long r1, long r2, long cpsr,
insn_llret_3arg_fn_t *fn)
{
register long rr0 asm("r0") = r0;
register long rr1 asm("r1") = r1;
register long rr2 asm("r2") = r2;
register long ret0 asm("r0");
register long ret1 asm("r1");
union reg_pair fnr;
__asm__ __volatile__ (
"msr cpsr_fs, %[cpsr] \n\t"
"mov lr, pc \n\t"
"mov pc, %[fn] \n\t"
: "=r" (ret0), "=r" (ret1)
: "0" (rr0), "r" (rr1), "r" (rr2),
[cpsr] "r" (cpsr), [fn] "r" (fn)
: "lr", "cc"
);
fnr.r0 = ret0;
fnr.r1 = ret1;
return fnr.dr;
}
static inline long __kprobes
insnslot_4arg_rflags(long r0, long r1, long r2, long r3, long cpsr,
insn_4arg_fn_t *fn)
{
register long rr0 asm("r0") = r0;
register long rr1 asm("r1") = r1;
register long rr2 asm("r2") = r2;
register long rr3 asm("r3") = r3;
register long ret asm("r0");
__asm__ __volatile__ (
"msr cpsr_fs, %[cpsr] \n\t"
"mov lr, pc \n\t"
"mov pc, %[fn] \n\t"
: "=r" (ret)
: "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
[cpsr] "r" (cpsr), [fn] "r" (fn)
: "lr", "cc"
);
return ret;
}
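/*
 * Hypothetical usage sketch (not code from this file; it assumes the
 * usual ARM kprobes fields p->ainsn.insn, regs->uregs[] and
 * regs->ARM_cpsr): a handler for an instruction rewritten to
 * "add r0, r1, r2" could run its slot with
 *
 *	insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
 *	long rnv = regs->uregs[rn];
 *	long rmv = regs->uregs[rm];
 *
 *	regs->uregs[rd] = insnslot_2arg_rflags(rnv, rmv, regs->ARM_cpsr,
 *					       i_fn);
 *
 * The arguments land in r0-r2, the probed task's flags are installed
 * in the CPSR, and the slot's "mov pc, lr" returns here with the
 * result in r0.
 */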
static inline long __kprobes
insnslot_1arg_rwflags(long r0, long *cpsr