This is the mail archive of the
gdb-patches@sourceware.org
mailing list for the GDB project.
Displaced stepping 0003: for 16-bit Thumb instructions
- From: Yao Qi <yao at codesourcery dot com>
- To: Ulrich Weigand <uweigand at de dot ibm dot com>
- Cc: gdb-patches at sourceware dot org
- Date: Mon, 28 Feb 2011 09:50:36 +0800
- Subject: Displaced stepping 0003: for 16-bit Thumb instructions
- References: <201102172011.p1HKBa0Y002611@d06av02.portsmouth.uk.ibm.com>
On 02/18/2011 04:11 AM, Ulrich Weigand wrote:
>> > @@ -4338,10 +4341,15 @@ displaced_read_reg (struct regcache *regs, CORE_ADDR from, int regno)
>> >
>> > if (regno == 15)
>> > {
>> > + if (displaced_in_arm_mode (regs))
>> > + from += 8;
>> > + else
>> > + from += 6;
> I think the 6 is wrong, it should be 4. From the ARM manual:
>
> - When executing an ARM instruction, PC reads as the address of the
> current instruction plus 8.
> - When executing a Thumb instruction, PC reads as the address of the
> current instruction plus 4.
>
Oh, yes. Fixed.
>> > +/* Clean up branch instructions (actually perform the branch, by setting
>> > + PC). */
>> > +static void
>> > +cleanup_branch(struct gdbarch *gdbarch, struct regcache *regs,
>> > + struct displaced_step_closure *dsc)
>> > +{
>> > + ULONGEST from = dsc->insn_addr;
>> > + uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
>> > + int branch_taken = condition_true (dsc->u.branch.cond, status);
>> > +
>> > + cleanup_branch_1 (gdbarch, regs, dsc, branch_taken);
>> > +}
>> > +
>> > +static void
>> > +cleanup_cbz_cbnz(struct gdbarch *gdbarch, struct regcache *regs,
>> > + struct displaced_step_closure *dsc)
>> > +{
>> > + cleanup_branch_1 (gdbarch, regs, dsc, dsc->u.branch.cond);
>> > +}
> I think this is unnecessary: copy_cbz_cnbz ought to be able to use
> cleanup_branch as-is. If the branch is taken, it should just set
> dsc->u.branch.cond to INSN_AL; if the branch is not taken, it
> should simply not use any cleanup at all since no further action
> is required.
>
Done as you suggested.
>> > @@ -4718,6 +4752,40 @@ copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
>> >
>> > B<cond> similar, but don't set r14 in cleanup. */
>> >
>> > +
>> > + dsc->u.branch.cond = cond;
>> > + dsc->u.branch.link = link;
>> > + dsc->u.branch.exchange = exchange;
>> > +
>> > + if (arm_pc_is_thumb (gdbarch, from))
> You should never use arm_pc_is_thumb here; the heuristics it applies are
> completely unnecessary, since we know in which mode we are, and may just
> result in the wrong outcome.
>
> In any case, as discussed above, this ought to be two separate copy
> routines, one for ARM mode and one for Thumb mode anyway.
>
Yes, it has been separated into two routines, arm_copy_b_bl_blx and
thumb2_copy_b_bl_blx.
>> > + {
>> > + /* Plus the size of THUMB_NOP and B/BL/BLX. */
>> > + dsc->u.branch.dest = from + 2 + 4 + offset;
>> > + RECORD_MOD_16BIT_INSN (0, THUMB_NOP);
> The + 2 doesn't look right to me. The offset is relative to the
> PC, which is -see above- "from + 8" in ARM mode and "from + 4" in
> Thumb mode. I don't see how the size of the THUMB_NOP is involved
> at all here ...
>
Oh, yes. Fixed.
>> > +static int
>> > +thumb_decode_dp (struct gdbarch *gdbarch, unsigned short insn,
>> > + struct displaced_step_closure *dsc)
>> > +{
>> > + /* 16-bit data-processing insns are not related to PC. */
>> > + return thumb_copy_unmodified_16bit (gdbarch, insn,"data-processing", dsc);
>> > +}
> This doesn't need to be a separate function, I guess ...
>
OK, fixed.
>> > + /* ADDS Rd, #imm8 */
>> > + RECORD_MOD_32BIT_INSN (0, 0x3000 | (rd << 8) | imm8);
> Should be 16BIT (but see my earlier mail on the usefulness of
> those macros in the first place ...).
>
Fixed.
>> > +static void
>> > +thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch,
>> > + unsigned short insn1, CORE_ADDR to,
> I don't think this needs TO.
>
Removed parameter TO.
>> > + unsigned short op = bits (insn1, 7, 9);
>> > + if (op == 6 || op == 7) /* BX or BLX */
>> > + err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
>> > + else
>> > + err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
>> > + dsc);
> These include the ADD / MOV / CMP high register instructions, which
> can access the PC, so they'd need special treatment
>
They are processed by thumb_copy_alu_reg now.
>> > + switch (bits (insn1, 8, 11))
>> > + {
>> > + case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
>> > + err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
>> > + break;
>> > + default:
>> > + err = thumb_copy_unmodified_16bit (gdbarch, insn1,"", dsc);
> Hmm, what about IT ?
>
So far, I don't have a good idea on how to support IT for displaced stepping.
I may need more time to think about this.
>> > + case 13: /* Conditional branch and supervisor call */
>> > + if (bits (insn1, 9, 11) != 7) /* conditional branch */
>> > + err = thumb_copy_b (gdbarch, insn1, dsc);
>> > + else
>> > + err = thumb_copy_unmodified_16bit (gdbarch, insn1,"svc", dsc);
> There is special handling in arm-linux-tdep.c for ARM SVC instructions.
> Don't we need this for Thumb SVC's as well?
>
Yes, we need that. SVC and the Linux-related handling are new to me, so I
suggest this patch go in first if it is otherwise OK; I'll address the SVC
handling in a follow-up.
>> > +
>> > + if ((bits (insn1, 13, 15) == 7) && (bits (insn1, 11, 12)))
> You should just use thumb_insn_size ...
>
Fixed.
--
Yao
>From 894283a4b306230db70f4df0182b6995778007ea Mon Sep 17 00:00:00 2001
From: Yao Qi <yao@codesourcery.com>
Date: Sat, 26 Feb 2011 14:57:33 +0800
Subject: [PATCH 3/4] displaced stepping for thumb 16-bit insn
don't support IT in thumb-16bit
---
gdb/arm-tdep.c | 436 +++++++++++++++++++++++++++++++++++++++++++++++++++++++-
1 files changed, 435 insertions(+), 1 deletions(-)
diff --git a/gdb/arm-tdep.c b/gdb/arm-tdep.c
index 2d06d8e..269c583 100644
--- a/gdb/arm-tdep.c
+++ b/gdb/arm-tdep.c
@@ -5332,6 +5332,23 @@ arm_copy_unmodified (uint32_t insn, const char *iname,
return 0;
}
+/* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction to the
+   scratch area without any modification, for instructions that cannot
+   read or write the PC.  INSN is the raw halfword; INAME is a short
+   class name used only for debug output.  Returns 0 on success.  */
+static int
+thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
+			     const char *iname,
+			     struct displaced_step_closure *dsc)
+{
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
+			"opcode/class '%s' unmodified\n", insn,
+			iname);
+
+  /* Record the instruction unchanged as the single scratch-pad insn.  */
+  RECORD_THUMB_MODE_INSN (0, insn);
+
+  return 0;
+}
+
/* Preload instructions with immediate offset. */
static void
@@ -5558,6 +5575,45 @@ arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
return copy_b_bl_blx (gdbarch, cond, exchange, link, offset, regs, dsc);
}
+/* Copy 16-bit Thumb B instructions: conditional branch (encoding T1,
+   opcode 0xd in bits 12-15) and unconditional branch (encoding T2,
+   opcode 0xe).  A NOP is placed in the scratch area; the branch itself
+   is performed by cleanup_branch using dsc->u.branch.  */
+static int
+thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
+	      struct displaced_step_closure *dsc)
+{
+  unsigned int cond = 0;
+  int offset = 0;
+  unsigned short bit_12_15 = bits (insn, 12, 15);
+  CORE_ADDR from = dsc->insn_addr;
+
+  if (bit_12_15 == 0xd)
+    {
+      /* Encoding T1: imm8 encodes a halfword offset, so scale by 2.  */
+      offset = (sbits (insn, 0, 7) << 1);
+      cond = bits (insn, 8, 11);
+    }
+  else if (bit_12_15 == 0xe)
+    {
+      /* Encoding T2: imm11 encodes a halfword offset, so scale by 2.  */
+      offset = (sbits (insn, 0, 10) << 1);
+      cond = INST_AL;
+    }
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog,
+			"displaced: copying b immediate insn %.4x "
+			"with offset %d\n", insn, offset);
+
+  dsc->u.branch.cond = cond;
+  dsc->u.branch.link = 0;
+  dsc->u.branch.exchange = 0;
+  /* In Thumb state the PC reads as the address of the current
+     instruction plus 4.  */
+  dsc->u.branch.dest = from + 4 + offset;
+
+  RECORD_THUMB_MODE_INSN (0, THUMB_NOP);
+
+  dsc->cleanup = &cleanup_branch;
+
+  return 0;
+}
+
/* Copy BX/BLX with register-specified destinations. */
static int
@@ -5609,6 +5665,25 @@ arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
return copy_bx_blx_reg (gdbarch, cond, link, rm, regs, dsc);
}
+/* Copy 16-bit Thumb BX/BLX (register).  Rm occupies bits 3-6 and may
+   name any register including the PC; the branch is performed by the
+   shared copy_bx_blx_reg machinery, so only a NOP is recorded here.  */
+static int
+thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
+		       struct regcache *regs,
+		       struct displaced_step_closure *dsc)
+{
+  int link = bit (insn, 7);	/* Set for BLX, clear for BX.  */
+  unsigned int rm = bits (insn, 3, 6);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
+			(unsigned short) insn);
+
+  /* BX/BLX are unconditional in Thumb state (outside an IT block),
+     so the condition is always AL.  */
+  dsc->u.branch.cond = INST_AL;
+  RECORD_THUMB_MODE_INSN (0, THUMB_NOP);
+
+  return copy_bx_blx_reg (gdbarch, INST_AL, link, rm, regs, dsc);
+}
+
/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
static void
@@ -5753,6 +5828,31 @@ arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
return copy_alu_reg (gdbarch, regs, dsc, reg_ids);
}
+/* Copy the 16-bit Thumb ADD/MOV/CMP (high registers) instructions,
+   which may read or write the PC.  Encodings that do not involve the
+   PC are copied unmodified.  */
+static int
+thumb_copy_alu_reg (struct gdbarch *gdbarch, unsigned short insn,
+		    struct regcache *regs,
+		    struct displaced_step_closure *dsc)
+{
+  unsigned int reg_ids[3];
+
+  /* Rd/Rn is split: high bit in insn bit 7, low bits in bits 0-2.  */
+  reg_ids[1] = (bit (insn, 7) << 3) | bits (insn, 0, 2);
+  reg_ids[0] = bits (insn, 3, 6);
+  /* NOTE(review): reg_ids[2] = 2 appears to be the scratch register
+     index expected by copy_alu_reg — confirm against its ARM-mode
+     callers.  */
+  reg_ids[2] = 2;
+
+  /* Only instructions involving the PC need rewriting.  */
+  if (reg_ids[0] != ARM_PC_REGNUM && reg_ids[1] != ARM_PC_REGNUM)
+    return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
+			"ALU", (unsigned short) insn);
+
+  /* Rewrite the insn so its register fields name r0/r1; copy_alu_reg
+     routes the real operand values through REG_IDS.  */
+  RECORD_THUMB_MODE_INSN (0, ((insn & 0xff00) | 0x08));
+
+  return copy_alu_reg (gdbarch, regs, dsc, reg_ids);
+}
+
/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
static void
@@ -7028,12 +7128,346 @@ arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
regs, dsc, ops);
}
+/* Copy an ADR (PC-relative address generation) instruction.  RD is the
+   destination register, IMM the encoded immediate, IS_32BIT selects the
+   32-bit Thumb-2 form.  */
+static int
+copy_pc_relative (struct regcache *regs, struct displaced_step_closure *dsc,
+		  int rd, unsigned int imm, int is_32bit)
+{
+  int val;
+
+  /* ADR Rd, #imm
+
+     Rewrite as:
+
+     Preparation: Rd <- PC
+     Insn: ADD Rd, #imm
+     Cleanup: Null.
+  */
+
+  /* Rd <- PC */
+  val = displaced_read_reg (regs, dsc->insn_addr, ARM_PC_REGNUM);
+  displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
+
+  if (is_32bit)
+    {
+      /* Encoding T3: ADD Rd, Rd, #imm.  NOTE(review): the i and imm3
+	 fields of the second halfword are left zero, so this assumes
+	 IMM < 256 — confirm callers never pass a larger immediate.  */
+      RECORD_THUMB_MODE_INSN (0, 0xf100 | rd);
+      RECORD_THUMB_MODE_INSN (1, 0x0 | (rd << 8) | imm);
+
+      dsc->numinsns = 2;
+    }
+  else
+    /* Encoding T2: ADDS Rd, #imm (8-bit immediate).  */
+    RECORD_THUMB_MODE_INSN (0, 0x3000 | (rd << 8) | imm);
+
+  return 0;
+}
+
+/* Decode and copy the 16-bit ADR instruction (encoding T1): Rd in
+   bits 8-10, imm8 in bits 0-7.  NOTE(review): encoding T1 scales imm8
+   by 4 and adds it to Align(PC, 4) — confirm copy_pc_relative accounts
+   for this scaling.  */
+static int
+thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, unsigned short insn,
+				struct regcache *regs,
+				struct displaced_step_closure *dsc)
+{
+  unsigned int rd = bits (insn, 8, 10);
+  unsigned int imm8 = bits (insn, 0, 7);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog,
+			"displaced: copying thumb adr r%d, #%d insn %.4x\n",
+			rd, imm8, insn);
+
+  return copy_pc_relative (regs, dsc, rd, imm8, 0);
+}
+
+/* Copy the 16-bit Thumb LDR (literal) instruction, which loads a word
+   from a PC-relative address.  */
+static int
+thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
+			      struct regcache *regs,
+			      struct displaced_step_closure *dsc)
+{
+  unsigned int rt = bits (insn1, 8, 10);	/* Rt is bits 8-10.  */
+  unsigned int pc;
+  /* Encoding T1: imm8 is zero-extended and scaled by 4.  */
+  unsigned int imm8 = bits (insn1, 0, 7) << 2;
+  CORE_ADDR from = dsc->insn_addr;
+
+  /* LDR Rt, [PC, #imm8]
+
+     Rewrite as:
+
+     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3,
+		  R2 <- Align (PC, 4), R3 <- #imm8;
+     Insn: LDR R0, [R2, R3];
+     Cleanup: R2 <- tmp2, R3 <- tmp3,
+	      Rt <- R0, R0 <- tmp0.  */
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying thumb ldr literal "
+			"insn %.4x\n", insn1);
+
+  dsc->tmp[0] = displaced_read_reg (regs, from, 0);
+  dsc->tmp[2] = displaced_read_reg (regs, from, 2);
+  dsc->tmp[3] = displaced_read_reg (regs, from, 3);
+  pc = displaced_read_reg (regs, from, ARM_PC_REGNUM);
+  /* The load address is relative to Align (PC, 4).  */
+  pc = pc & 0xfffffffc;
+
+  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
+  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
+
+  dsc->rd = rt;
+  dsc->u.ldst.xfersize = 4;
+  dsc->u.ldst.rn = 0;
+  dsc->u.ldst.immed = 0;
+  dsc->u.ldst.writeback = 0;
+  dsc->u.ldst.restore_r4 = 0;
+
+  RECORD_THUMB_MODE_INSN (0, 0x58d0);	/* ldr r0, [r2, r3] */
+
+  dsc->cleanup = &cleanup_load;
+
+  return 0;
+}
+
+/* Copy Thumb CBNZ/CBZ instruction.  */
+
+static int
+thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, unsigned short insn1,
+		     struct regcache *regs,
+		     struct displaced_step_closure *dsc)
+{
+  int non_zero = bit (insn1, 11);	/* 1 for CBNZ, 0 for CBZ.  */
+  /* Offset is i:imm5:'0' — i in bit 9, imm5 in bits 3-7.  */
+  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
+  CORE_ADDR from = dsc->insn_addr;
+  int rn = bits (insn1, 0, 2);
+  int rn_val = displaced_read_reg (regs, from, rn);
+
+  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
+  /* CBNZ and CBZ do not affect the condition flags.  If the condition
+     is true, set it to INST_AL so cleanup_branch knows the branch is
+     taken; otherwise leave it false so cleanup_branch does nothing.  */
+  if (dsc->u.branch.cond)
+    dsc->u.branch.cond = INST_AL;
+
+  dsc->u.branch.link = 0;
+  dsc->u.branch.exchange = 0;
+
+  /* In Thumb state the PC reads as the address of the current
+     instruction plus 4.  */
+  dsc->u.branch.dest = from + 4 + imm5;
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
+			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
+			rn, rn_val, insn1, (unsigned long) dsc->u.branch.dest);
+
+  RECORD_THUMB_MODE_INSN (0, THUMB_NOP);
+
+  dsc->cleanup = &cleanup_branch;
+  return 0;
+}
+
+/* Cleanup for a 16-bit Thumb POP that had the PC in its register list:
+   the scratch sequence left the loaded PC value in a register (r8 when
+   the low-register list was non-empty, r0 otherwise).  Write it to the
+   PC with BX semantics and restore the clobbered register.  */
+static void
+cleanup_pop_pc_16bit (struct gdbarch *gdbarch, struct regcache *regs,
+		      struct displaced_step_closure *dsc)
+{
+  CORE_ADDR from = dsc->insn_addr;
+  /* Mirrors the register choice made in thumb_copy_pop_pc_16bit.  */
+  int rx = dsc->u.block.regmask ? 8 : 0;
+  int rx_val = displaced_read_reg (regs, from, rx);
+
+  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, rx_val, BX_WRITE_PC);
+  displaced_write_reg (regs, dsc, rx, dsc->tmp[0], CANNOT_WRITE_PC);
+}
+
+/* Copy a 16-bit Thumb POP instruction whose register list includes the
+   PC (bit 8 of the insn set).  */
+static int
+thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
+			 struct regcache *regs,
+			 struct displaced_step_closure *dsc)
+{
+  CORE_ADDR from = dsc->insn_addr;
+
+  /* Low-register list is bits 0-7; the PC (bit 8) is handled below.  */
+  dsc->u.block.regmask = insn1 & 0x00ff;
+
+  /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
+     to:
+
+     (1) register list is not empty,
+     Prepare: tmp[0] <- r8,
+
+     POP {rX};   PC value is popped into rX (lowest register in list)
+     MOV r8, rX; finally, PC value is held in r8
+     POP {rX, rY, ..., rZ}
+
+     Cleanup: PC <- r8, r8 <- tmp[0]
+
+     (2) register list is empty,
+     Prepare: tmp[0] <- r0,
+
+     POP {r0}
+
+     Cleanup: PC <- r0, r0 <- tmp[0]
+  */
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog,
+			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
+			dsc->u.block.regmask, insn1);
+
+  if (dsc->u.block.regmask != 0)
+    {
+      int rx = 0;
+
+      /* Save r8; the scratch sequence below clobbers it.  */
+      dsc->tmp[0] = displaced_read_reg (regs, from, 8);
+
+      /* Look for the first register in register list.  */
+      for (rx = 0; rx < 8; rx++)
+	if (dsc->u.block.regmask & (1 << rx))
+	  break;
+
+      RECORD_THUMB_MODE_INSN (0, 0xbc00 | (1 << rx));	/* POP {rX} */
+      RECORD_THUMB_MODE_INSN (1, 0x4680 | (rx << 3));	/* MOV r8, rX */
+      /* Re-issue the POP with the PC bit (bit 8) cleared.  */
+      RECORD_THUMB_MODE_INSN (2, insn1 & 0xfeff);	/* POP {rX, rY, ..., rZ} */
+      /* RECORD_THUMB_MODE_INSN (3, 0x46c7); */	/* MOV PC, r8 */
+
+      dsc->numinsns = 3;
+    }
+  else
+    {
+      dsc->tmp[0] = displaced_read_reg (regs, from, 0);
+
+      RECORD_THUMB_MODE_INSN (0, 0xbc00);	/* POP {r0} */
+      /* RECORD_THUMB_MODE_INSN (1, 0x4683); */	/* MOV PC, r0 */
+
+      dsc->numinsns = 1;
+    }
+
+  dsc->cleanup = &cleanup_pop_pc_16bit;
+  return 0;
+}
+
+/* Decode a 16-bit Thumb instruction by its top-level opcode fields
+   (bits 15:12, sub-opcode bits 11:10) and dispatch to the matching
+   copy routine.  Instructions that cannot read or write the PC are
+   copied to the scratch area unmodified.  */
+static void
+thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch,
+				    unsigned short insn1, struct regcache *regs,
+				    struct displaced_step_closure *dsc)
+{
+  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
+  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
+  int err = 0;
+
+  /* 16-bit thumb instructions.  */
+  switch (op_bit_12_15)
+    {
+      /* Shift (immediate), add, subtract, move and compare.  */
+    case 0: case 1: case 2: case 3:
+      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
+					 "shift/add/sub/mov/cmp", dsc);
+      break;
+    case 4:
+      switch (op_bit_10_11)
+	{
+	case 0: /* Data-processing.  */
+	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
+					     "data-processing", dsc);
+	  break;
+	case 1: /* Special data instructions and branch and exchange.  */
+	  {
+	    unsigned short op = bits (insn1, 7, 9);
+	    if (op == 6 || op == 7) /* BX or BLX */
+	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
+	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
+	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
+	    else
+	      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
+						 "special data", dsc);
+	  }
+	  break;
+	default: /* LDR (literal) */
+	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
+	}
+      break;
+    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
+      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
+      break;
+    case 10:
+      if (op_bit_10_11 < 2) /* Generate PC-relative address */
+	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
+      else /* Generate SP-relative address */
+	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
+      break;
+    case 11: /* Misc 16-bit instructions */
+      {
+	switch (bits (insn1, 8, 11))
+	  {
+	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
+	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
+	    break;
+	  case 12: case 13: /* POP */
+	    if (bit (insn1, 8)) /* PC is in register list.  */
+	      {
+		err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
+	      }
+	    else
+	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
+	    break;
+	  case 15: /* If-Then, and hints */
+	    if (bits (insn1, 0, 3))
+	      err = 1; /* IT is not supported (see list discussion).  */
+	    else
+	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
+	    break;
+	  default:
+	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
+	  }
+      }
+      break;
+    case 12:
+      if (op_bit_10_11 < 2) /* Store multiple registers */
+	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
+      else /* Load multiple registers */
+	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
+      break;
+    case 13: /* Conditional branch and supervisor call */
+      if (bits (insn1, 9, 11) != 7) /* conditional branch */
+	err = thumb_copy_b (gdbarch, insn1, dsc);
+      else
+	/* NOTE(review): Thumb SVC likely needs the same special handling
+	   as ARM SVC in arm-linux-tdep.c — deferred per list discussion.  */
+	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "svc", dsc);
+      break;
+    case 14: /* Unconditional branch */
+      err = thumb_copy_b (gdbarch, insn1, dsc);
+      break;
+    default:
+      internal_error (__FILE__, __LINE__,
+		      _("thumb_process_displaced_insn: Instruction decode error"));
+    }
+
+  if (err)
+    internal_error (__FILE__, __LINE__,
+		    _("thumb_process_displaced_insn: Instruction decode error"));
+}
+
+/* Placeholder for 32-bit Thumb-2 instructions: displaced stepping over
+   them is not implemented by this patch and is rejected with an error.  */
+static void
+thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
+				    uint16_t insn2, struct regcache *regs,
+				    struct displaced_step_closure *dsc)
+{
+  error (_("Displaced stepping is only supported in ARM mode and Thumb 16bit instructions"));
+}
+
 static void
 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			      CORE_ADDR to, struct regcache *regs,
			      struct displaced_step_closure *dsc)
 {
-  error (_("Displaced stepping is only supported in ARM mode"));
+  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
+  /* Read the first halfword; thumb_insn_size uses it to distinguish a
+     16-bit insn from the first half of a 32-bit Thumb-2 insn.  */
+  unsigned short insn1
+    = read_memory_unsigned_integer (from, 2, byte_order_for_code);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
+			"at %.8lx\n", insn1, (unsigned long) from);
+
+  dsc->is_thumb = 1;
+  dsc->insn_size = thumb_insn_size (insn1);
+  if (thumb_insn_size (insn1) == 4)
+    {
+      /* 32-bit Thumb-2: fetch the second halfword as well.  */
+      unsigned short insn2
+	= read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
+      thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
+    }
+  else
+    thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
 }
void
--
1.7.0.4