This is the mail archive of the gdb-patches@sourceware.org mailing list for the GDB project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

[try 2nd 5/8] Displaced stepping for Thumb 32-bit insns


Displaced stepping for 32-bit Thumb instructions.

-- 
Yao Qi
2011-03-24  Yao Qi  <yao@codesourcery.com>

	* gdb/arm-tdep.c (thumb_copy_unmodified_32bit): New.
	(thumb2_copy_preload): New.
	(thumb2_copy_preload_reg): New.
	(thumb2_copy_copro_load_store): New.
	(thumb2_copy_b_bl_blx): New.
	(thumb2_copy_alu_reg): New.
	(thumb2_copy_ldr_str_ldrb_strb): New.
	(thumb2_copy_block_xfer): New.
	(thumb_32bit_copy_undef): New.
	(thumb2_decode_ext_reg_ld_st): New.
	(thumb2_decode_svc_copro): New.
	(thumb_decode_pc_relative_32bit): New.
	(decode_thumb_32bit_ld_mem_hints): New.
	(thumb_process_displaced_32bit_insn): Process 32-bit Thumb insn.
---
 gdb/arm-tdep.c |  701 +++++++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 files changed, 695 insertions(+), 6 deletions(-)

diff --git a/gdb/arm-tdep.c b/gdb/arm-tdep.c
index a356451..6ba7b5b 100644
--- a/gdb/arm-tdep.c
+++ b/gdb/arm-tdep.c
@@ -5333,6 +5333,23 @@ arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
   return 0;
 }
 
+static int
+thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, unsigned int insn1,
+			     unsigned int insn2, const char *iname,
+			     struct displaced_step_closure *dsc)
+{
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
+			"opcode/class '%s' unmodified\n", insn1, insn2,
+			iname);
+
+  dsc->modinsn[0] = insn1;
+  dsc->modinsn[1] = insn2;
+  dsc->numinsns = 2;
+
+  return 0;
+}
+
 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
    modification.  */
 static int
@@ -5400,6 +5417,27 @@ arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
   return 0;
 }
 
+static int
+thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
+		     struct regcache *regs, struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn1, 0, 3);
+  if (rn == ARM_PC_REGNUM)
+    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.4x%.4x\n",
+			insn1, insn2);
+
+  dsc->modinsn[0] = insn1 & 0xfff0;
+  dsc->modinsn[1] = insn2;
+  dsc->numinsns = 2;
+
+  install_preload (gdbarch, regs, dsc, rn);
+
+  return 0;
+}
+
 /* Preload instructions with register offset.  */
 
 static void
@@ -5448,6 +5486,31 @@ arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
   return 0;
 }
 
+static int
+thumb2_copy_preload_reg (struct gdbarch *gdbarch, uint16_t insn1,
+			 uint16_t insn2, struct regcache *regs,
+			 struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn1, 0, 3);
+  unsigned int rm = bits (insn2, 0, 3);
+
+
+  if (rn != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
+    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload reg",
+					dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.4x%.4x\n",
+			insn1, insn1);
+
+  dsc->modinsn[0] = insn1 & 0xfff0;
+  dsc->modinsn[1] = (insn2 & 0xfff0) | 0x1;
+  dsc->numinsns = 2;
+
+  install_preload_reg (gdbarch, regs, dsc, rn, rm);
+  return 0;
+}
+
 /* Copy/cleanup coprocessor load and store instructions.  */
 
 static void
@@ -5500,6 +5563,33 @@ copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
   return 0;
 }
 
+static int
+thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
+			      uint16_t insn2, struct regcache *regs,
+			      struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn1, 0, 3);
+
+  if (rn == ARM_PC_REGNUM)
+    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+					"copro load/store", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
+			"load/store insn %.4x%.4x\n", insn1, insn2);
+
+  dsc->u.ldst.writeback = bit (insn1, 9);
+  dsc->u.ldst.rn = rn;
+
+  dsc->modinsn[0] = insn1 & 0xfff0;
+  dsc->modinsn[1] = insn2;
+  dsc->numinsns = 2;
+
+  install_copy_copro_load_store (gdbarch, regs, dsc);
+
+  return 0;
+}
+
 /* Clean up branch instructions (actually perform the branch, by setting
    PC).  */
 
@@ -5584,6 +5674,58 @@ arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
   return install_b_bl_blx (gdbarch, cond, exchange, link, offset, regs, dsc);
 }
 
+static int
+thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, unsigned short insn1,
+		      unsigned short insn2, struct regcache *regs,
+		      struct displaced_step_closure *dsc)
+{
+  int link = bit (insn2, 14);
+  int exchange = link && !bit (insn2, 12);
+  int cond = INST_AL;
+  long offset =0;
+  int j1 = bit (insn2, 13);
+  int j2 = bit (insn2, 11);
+  int s = sbits (insn1, 10, 10);
+  int i1 = !(j1 ^ bit (insn1, 10));
+  int i2 = !(j2 ^ bit (insn1, 10));
+
+  if (!link && !exchange) /* B */
+    {
+      cond = bits (insn1, 6, 9);
+      offset = (bits (insn2, 0, 10) << 1);
+      if (bit (insn2, 12)) /* Encoding T4 */
+	{
+	  offset |= (bits (insn1, 0, 9) << 12)
+	    | (i2 << 22)
+	    | (i1 << 23)
+	    | (s << 24);
+	}
+      else /* Encoding T3 */
+	offset |= (bits (insn1, 0, 5) << 12)
+	  | (j1 << 18)
+	  | (j2 << 19)
+	  | (s << 20);
+    }
+  else
+    {
+      offset = (bits (insn1, 0, 9) << 12);
+      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
+      offset |= exchange ?
+	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
+    }
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
+			"%.4x %.4x with offset %.8lx\n",
+			(exchange) ? "blx" : "bl",
+			insn1, insn2, offset);
+
+  dsc->u.branch.dest = dsc->insn_addr + 4 + offset;
+  dsc->modinsn[0] = THUMB_NOP;
+
+  return install_b_bl_blx (gdbarch, cond, exchange, 1, offset, regs, dsc);
+}
+
 /* Copy B Thumb instructions.  */
 static int
 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
@@ -5849,6 +5991,40 @@ thumb_copy_alu_reg (struct gdbarch *gdbarch, unsigned short insn,
   return install_alu_reg (gdbarch, regs, dsc);
 }
 
/* Copy a 32-bit Thumb ALU (register) instruction.  If none of the
   register operands is the PC the insn runs unmodified; otherwise the
   register fields are rewritten onto scratch registers and
   install_alu_reg supplies the values and cleanup.  */
static int
thumb2_copy_alu_reg (struct gdbarch *gdbarch, unsigned short insn1,
		     unsigned short insn2, struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn2, 4, 7);
  int is_mov = (op2 == 0x0);

  dsc->u.alu_reg.rn = bits (insn1, 0, 3); /* Rn */
  dsc->u.alu_reg.rm = bits (insn2, 0, 3); /* Rm */
  dsc->rd = bits (insn2, 8, 11); /* Rd */

  /* If none of Rn, Rm or Rd refers to the PC, the instruction can be
     copied to the scratch pad unmodified.  */
  if (dsc->u.alu_reg.rn != ARM_PC_REGNUM
      && dsc->u.alu_reg.rm != ARM_PC_REGNUM
      && dsc->rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* For MOV insn1 is kept as-is -- presumably because MOV carries no
     Rn field in the first halfword; TODO confirm against the
     encoding.  Otherwise Rn (insn1<3:0>) is replaced with r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn1;
  else
    dsc->modinsn[0] = ((insn1 & 0xfff0) | 0x1);

  /* Replace Rd (insn2<11:8>) with r0 and Rm (insn2<3:0>) with r2.  */
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x2);
  dsc->numinsns = 2;

  return install_alu_reg (gdbarch, regs, dsc);

}
+
 /* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */
 
 static void
@@ -6117,6 +6293,69 @@ install_ldr_str_ldrb_strb (struct gdbarch *gdbarch, struct regcache *regs,
 }
 
/* Copy a 32-bit Thumb load/store single, rewriting it onto scratch
   registers when any of Rt/Rn/Rm is the PC.  LOAD, BYTE, USERMODE and
   WRITEBACK describe the flavour already decoded by the caller.  */
static int
thumb2_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, unsigned short insn1,
			       unsigned short insn2,  struct regcache *regs,
			       struct displaced_step_closure *dsc,
			       int load, int byte, int usermode, int writeback)
{
  int immed = !bit (insn1, 9);
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */

  /* NOTE(review): RM is tested unconditionally even though it is only
     meaningful for the register-offset form; an immediate form whose
     low nibble is 0xf is needlessly routed through the modified path
     (harmless, since install_ldr_str_ldrb_strb receives IMMED, but
     could be tightened).  */
  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load/store",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.4x%.4x\n",
			load ? (byte ? "ldrb" : "ldr")
			     : (byte ? "strb" : "str"), usermode ? "t" : "",
			rt, rn, insn1, insn2);

  dsc->rd = rt;
  dsc->u.ldst.rn = rn;

  install_ldr_str_ldrb_strb (gdbarch, regs, dsc, load, byte, usermode,
			  writeback, rm, immed);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].
	   Rn (insn1<3:0>) becomes r2, Rt (insn2<15:12>) becomes r0.  */
	{
	  dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
	  dsc->modinsn[1] = insn2 & 0x0fff;
	}
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].
	   Additionally Rm (insn2<3:0>) becomes r3.  */
	{
	  dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
	  dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
	}

      dsc->numinsns = 2;
    }
  else
    {
      /* A 32-bit Thumb store with Rt == PC is unpredictable and one
	 with Rn == PC is undefined, so a valid store can never reach
	 this branch.  */

      gdb_assert (0);
    }

  return 0;
}
+
+static int
 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
 			    struct regcache *regs,
 			    struct displaced_step_closure *dsc,
@@ -6508,6 +6747,87 @@ copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
   return 0;
 }
 
/* Copy a 32-bit Thumb block transfer (LDM/STM/PUSH/POP).  */
static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  (insn2 bit 15 is the PC's slot in the register
     list.)  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  dsc->u.block.load = bit (insn1, 4);
  dsc->u.block.user = bit (insn1, 6);
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* Cannot happen: bit 13 was cleared from the mask just
	     above, so it can never read back as 0xffff.  */
	  gdb_assert (0);
	}
      else
	{
	  unsigned int regmask = dsc->u.block.regmask;
	  /* NOTE(review): bit, to, from and new_rn below are unused --
	     apparently leftovers from the ARM version of this
	     routine.  */
	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
	  unsigned int to = 0, from = 0, i, new_rn;

	  /* Save r0..r(n-1), which the renumbered load below will
	     clobber; presumably cleanup_block_load_pc moves the loaded
	     values into the originally named registers and restores
	     these -- confirm against its definition.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Suppress writeback in the copied insn (clear insn1 bit 5);
	     the base-register update is handled during cleanup.  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  /* Load into the low registers r0..r(n-1) instead of the
	     original list.  */
	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* Store path: run the insn as-is; cleanup_block_store_pc
	 presumably fixes up the value stored for the PC slot --
	 confirm against its definition.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
+
 /* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
    for Linux, where some SVC instructions must be treated specially.  */
 
@@ -6599,6 +6919,23 @@ copy_undef (struct gdbarch *gdbarch, uint32_t insn,
   return 0;
 }
 
+static int
+thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
+                       struct displaced_step_closure *dsc)
+{
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
+                       "%.4x %.4x\n", (unsigned short) insn1,
+                       (unsigned short) insn2);
+
+  dsc->modinsn[0] = insn1;
+  dsc->modinsn[1] = insn2;
+  dsc->numinsns = 2;
+
+  return 0;
+}
+
 /* Copy unpredictable instructions.  */
 
 static int
@@ -6993,6 +7330,43 @@ decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
   return 1;
 }
 
+/* Decode extension register load/store.  Exactly the same as
+   arm_decode_ext_reg_ld_st.  */
+
+static int
+thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
+			     uint16_t insn2,  struct regcache *regs,
+			     struct displaced_step_closure *dsc)
+{
+  unsigned int opcode = bits (insn1, 4, 8);
+
+  switch (opcode)
+    {
+    case 0x04: case 0x05:
+      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+					  "vfp/neon vmov", dsc);
+
+    case 0x08: case 0x0c: /* 01x00 */
+    case 0x0a: case 0x0e: /* 01x10 */
+    case 0x12: case 0x16: /* 10x10 */
+      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+					  "vfp/neon vstm/vpush", dsc);
+
+    case 0x09: case 0x0d: /* 01x01 */
+    case 0x0b: case 0x0f: /* 01x11 */
+    case 0x13: case 0x17: /* 10x11 */
+      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+					  "vfp/neon vldm/vpop", dsc);
+
+    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
+    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
+      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
+    }
+
+  /* Should be unreachable.  */
+  return 1;
+}
+
 static int
 decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
 		  struct regcache *regs, struct displaced_step_closure *dsc)
@@ -7040,7 +7414,105 @@ decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
 }
 
 static int
-copy_pc_relative (struct regcache *regs, struct displaced_step_closure *dsc,
+thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
+			 uint16_t insn2, struct regcache *regs,
+			 struct displaced_step_closure *dsc)
+{
+  unsigned int coproc = bits (insn2, 8, 11);
+  unsigned int op1 = bits (insn1, 4, 9);
+  unsigned int bit_5_8 = bits (insn1, 5, 8);
+  unsigned int bit_9 = bit (insn1, 9);
+  unsigned int bit_4 = bit (insn1, 4);
+  unsigned int rn = bits (insn1, 0, 3);
+
+  if (bit_9 == 0)
+    {
+      if (bit_5_8 == 2)
+	{
+	  if ((coproc & 0xe) == 0xa) /* 64-bit xfer.  */
+	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						"neon 64bit xfer", dsc);
+	  else
+	    {
+	      if (bit_4) /* MRRC/MRRC2 */
+		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						    "mrrc/mrrc2", dsc);
+	      else /* MCRR/MCRR2 */
+		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						    "mcrr/mcrr2", dsc);
+	    }
+	}
+      else if (bit_5_8 == 0) /* UNDEFINED.  */
+	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
+      else
+	{
+	   /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
+	  if ((coproc & 0xe) == 0xa)
+	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
+						dsc);
+	  else /* coproc is not 101x.  */
+	    {
+	      if (bit_4 == 0) /* STC/STC2.  */
+		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
+						     regs, dsc);
+	      else
+		{
+		  if (rn == 0xf) /* LDC/LDC2 literal.  */
+		    return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
+							 regs, dsc);
+		  else /* LDC/LDC2 immeidate.  */
+		    return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
+							 regs, dsc);
+		}
+	    }
+	}
+    }
+  else
+    {
+      unsigned int op = bit (insn2, 4);
+      unsigned int bit_8 = bit (insn1, 8);
+
+      if (bit_8) /* Advanced SIMD */
+	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+					    "neon", dsc);
+      else
+	{
+	  /*coproc is 101x.  */
+	  if ((coproc & 0xe) == 0xa)
+	    {
+	      if (op) /* 8,16,32-bit xfer.  */
+		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						    "neon 8/16/32 bit xfer",
+						    dsc);
+	      else /* VFP data processing.  */
+		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						    "vfp dataproc", dsc);
+	    }
+	  else
+	    {
+	      if (op)
+		{
+		  if (bit_4) /* MRC/MRC2 */
+		    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+							"mrc/mrc2", dsc);
+		  else /* MCR/MCR2 */
+		     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+							"mcr/mcr2", dsc);
+		}
+	      else /* CDP/CDP 2 */
+		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						    "cdp/cdp2", dsc);
+	    }
+	}
+    }
+
+
+
+  return 0;
+}
+
+static int
+decode_pc_relative (struct regcache *regs, struct displaced_step_closure *dsc,
 		  int rd, unsigned int imm, int is_32bit)
 {
   int val;
@@ -7086,7 +7558,27 @@ thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, unsigned short insn,
 			"displaced: copying thumb adr r%d, #%d insn %.4x\n",
 			rd, imm8, insn);
 
-  return copy_pc_relative (regs, dsc, rd, imm8, 0);
+  return decode_pc_relative (regs, dsc, rd, imm8, 0);
+}
+
/* Copy a 32-bit Thumb PC-relative address computation (ADR / ADD Rd,
   pc, #imm).  */
static int
thumb_decode_pc_relative_32bit (struct gdbarch *gdbarch, unsigned short insn1,
				unsigned short insn2, struct regcache *regs,
				struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* The immediate has the same encoding in both ADR and ADDS, so
     simply extract the raw immediate encoding rather than computing
     its value: when generating the substitute ADDS instruction the
     bits can be ORed straight back in.  (Bit 10 of insn1 is bit 26 of
     the full 32-bit instruction, hence the shift.)  */
  unsigned int imm = (insn2 & 0x70ff) | (bit (insn1, 10) << 26);

  if (debug_displaced)
    /* Note: IMM printed here is the raw encoding, not the decoded
       immediate value.  */
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d insn %.4x%.4x\n",
			rd, imm, insn1, insn2);

  return decode_pc_relative (regs, dsc, rd, imm, 1);
}
 
 static int
@@ -7348,12 +7840,209 @@ thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch,
 		    _("thumb_process_displaced_insn: Instruction decode error"));
 }
 
/* Decode the 32-bit Thumb "load byte/halfword/word and memory hints"
   group and dispatch to the appropriate copy routine, returning its
   result.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 unsigned short insn1, unsigned short insn2,
				 struct regcache *regs,
				 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int user_mode = (bits (insn2, 8, 11) == 0xe);
  int op1 = bits (insn1, 7, 8);
  int writeback = 0;

  /* For the immediate forms (op1 == 0 or 2) with insn2 bit 11 set,
     bit 8 of insn2 is the writeback flag.  Hoisted here from the
     three identical copies that used to sit in each switch case.  */
  if ((op1 == 0 || op1 == 2) && bit (insn2, 11))
    writeback = bit (insn2, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints.  */
      if (rt == 0xf) /* PLD/PLI.  */
	{
	  if (bits (insn2, 6, 11)) /* Immediate-offset form.  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else /* Register-offset form.  */
	    return thumb2_copy_preload_reg (gdbarch, insn1, insn2, regs, dsc);
	}
      return thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2, regs,
					    dsc, 1, 1, user_mode, writeback);

    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and unallocated memory hint.  */
	{
	  if (bits (insn2, 6, 11))
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb2_copy_preload_reg (gdbarch, insn1, insn2, regs, dsc);
	}
      /* NOTE(review): halfword loads are passed with BYTE == 0, i.e.
	 the same as word loads -- confirm this is what
	 thumb2_copy_ldr_str_ldrb_strb expects for LDRH/LDRSH.  */
      return thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2, regs,
					    dsc, 1, 0, user_mode, writeback);

    case 2: /* Load word.  */
      return thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2, regs,
					    dsc, 1, 0, user_mode, writeback);

    default: /* Undefined.  */
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
    }
}
+
 static void
 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
 				    uint16_t insn2, struct regcache *regs,
 				    struct displaced_step_closure *dsc)
 {
-  error (_("Displaced stepping is only supported in ARM mode and Thumb 16bit instructions"));
+  int err = 0;
+  unsigned short op = bit (insn2, 15);
+  unsigned int op1 = bits (insn1, 11, 12);
+
+  switch (op1)
+    {
+    case 1:
+      {
+	switch (bits (insn1, 9, 10))
+	  {
+	  case 0: /* load/store multiple */
+	    switch (bits (insn1, 7, 8))
+	      {
+	      case 0: case 3: /* SRS, RFE */
+		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						   "srs/rfe", dsc);
+		break;
+	      case 1: case 2: /* LDM/STM/PUSH/POP */
+		/* These Thumb 32-bit insns have the same encodings as ARM
+		   counterparts.  */
+		err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
+	      }
+	    break;
+	  case 1:
+	    /* Data-processing (shift register).  In ARM archtecture reference
+	       manual, this entry is
+	       "Data-processing (shifted register) on page A6-31".  However,
+	    instructions in table A6-31 shows that they are `alu_reg'
+	    instructions.  There is no alu_shifted_reg instructions in
+	    Thumb-2.  */
+	    err = thumb2_copy_alu_reg (gdbarch, insn1, insn2, regs,
+					       dsc);
+	    break;
+	  default: /* Coprocessor instructions */
+	    /* Thumb 32bit coprocessor instructions have the same encoding
+	       as ARM's.  */
+	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
+	    break;
+	  }
+      break;
+      }
+    case 2: /* op1 = 2 */
+      if (op) /* Branch and misc control.  */
+	{
+	  if (bit (insn2, 14)) /* BLX/BL */
+	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
+	  else if (!bits (insn2, 12, 14) && bits (insn1, 8, 10) != 0x7)
+	    /* Conditional Branch */
+	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
+	  else
+	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+					       "misc ctrl", dsc);
+	}
+      else
+	{
+	  if (bit (insn1, 9)) /* Data processing (plain binary imm) */
+	    {
+	      int op = bits (insn1, 4, 8);
+	      int rn = bits (insn1, 0, 4);
+	      if ((op == 0 || op == 0xa) && rn == 0xf)
+		err = thumb_decode_pc_relative_32bit (gdbarch, insn1, insn2,
+						      regs, dsc);
+	      else
+		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						   "dp/pb", dsc);
+	    }
+	  else /* Data processing (modified immeidate) */
+	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+					       "dp/mi", dsc);
+	}
+      break;
+    case 3: /* op1 = 3 */
+      switch (bits (insn1, 9, 10))
+	{
+	case 0:
+	  if (bit (insn1, 4))
+	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
+						   regs, dsc);
+	  else
+	    {
+	      if (bit (insn1, 8)) /* NEON Load/Store */
+		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						   "neon elt/struct load/store",
+						   dsc);
+	      else /* Store single data item */
+		{
+		  int user_mode = (bits (insn2, 8, 11) == 0xe);
+		  int byte = (bits (insn1, 5, 7) == 0
+			      || bits (insn1, 5, 7) == 4);
+		  int writeback = 0;
+
+		  if (bits (insn1, 5, 7) < 3 && bit (insn2, 11))
+		    writeback = bit (insn2, 8);
+
+		  err = thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2,
+						       regs, dsc, 0, byte,
+						       user_mode, writeback);
+		}
+	    }
+	  break;
+	case 1: /* op1 = 3, bits (9, 10) == 1 */
+	  switch (bits (insn1, 7, 8))
+	    {
+	    case 0: case 1: /* Data processing (register) */
+	      err = thumb2_copy_alu_reg (gdbarch, insn1, insn2, regs, dsc);
+	      break;
+	    case 2: /* Multiply and absolute difference */
+	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						 "mul/mua/diff", dsc);
+	      break;
+	    case 3: /* Long multiply and divide */
+	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
+						 "lmul/lmua", dsc);
+	      break;
+	    }
+	  break;
+	default: /* Coprocessor instructions */
+	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
+	  break;
+	}
+      break;
+    default:
+      err = 1;
+    }
+
+  if (err)
+    internal_error (__FILE__, __LINE__,
+		    _("thumb_process_displaced_insn: Instruction decode error"));
+
 }
 
 static void
-- 
1.7.0.4


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]