1 /***************************************************************************
2 * Copyright (C) 2006 by Dominic Rath *
3 * Dominic.Rath@gmx.de *
5 * This program is free software; you can redistribute it and/or modify *
6 * it under the terms of the GNU General Public License as published by *
7 * the Free Software Foundation; either version 2 of the License, or *
8 * (at your option) any later version. *
10 * This program is distributed in the hope that it will be useful, *
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13 * GNU General Public License for more details. *
15 * You should have received a copy of the GNU General Public License *
16 * along with this program; if not, write to the *
17 * Free Software Foundation, Inc., *
18 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
19 ***************************************************************************/
26 #include "arm_disassembler.h"
27 #include "arm_simulator.h"
29 #include "binarybuffer.h"
33 u32
arm_shift(u8 shift
, u32 Rm
, u32 shift_amount
, u8
*carry
)
38 if (shift
== 0x0) /* LSL */
40 if ((shift_amount
> 0) && (shift_amount
<= 32))
42 return_value
= Rm
<< shift_amount
;
43 *carry
= Rm
>> (32 - shift_amount
);
45 else if (shift_amount
> 32)
50 else /* (shift_amount == 0) */
55 else if (shift
== 0x1) /* LSR */
57 if ((shift_amount
> 0) && (shift_amount
<= 32))
59 return_value
= Rm
>> shift_amount
;
60 *carry
= (Rm
>> (shift_amount
- 1)) & 1;
62 else if (shift_amount
> 32)
67 else /* (shift_amount == 0) */
72 else if (shift
== 0x2) /* ASR */
74 if ((shift_amount
> 0) && (shift_amount
<= 32))
76 /* right shifts of unsigned values are guaranteed to be logical (shift in zeroes)
77 * simulate an arithmetic shift (shift in signed-bit) by adding the signed-bit manually */
78 return_value
= Rm
>> shift_amount
;
80 return_value
|= 0xffffffff << (32 - shift_amount
);
82 else if (shift_amount
> 32)
86 return_value
= 0xffffffff;
95 else /* (shift_amount == 0) */
100 else if (shift
== 0x3) /* ROR */
102 if (shift_amount
== 0)
108 shift_amount
= shift_amount
% 32;
109 return_value
= (Rm
>> shift_amount
) | (Rm
<< (32 - shift_amount
));
110 *carry
= (return_value
>> 31) & 0x1;
113 else if (shift
== 0x4) /* RRX */
115 return_value
= Rm
>> 1;
124 u32
arm_shifter_operand(armv4_5_common_t
*armv4_5
, int variant
, union arm_shifter_operand shifter_operand
, u8
*shifter_carry_out
)
127 int instruction_size
;
129 if (armv4_5
->core_state
== ARMV4_5_STATE_ARM
)
130 instruction_size
= 4;
132 instruction_size
= 2;
134 *shifter_carry_out
= buf_get_u32(armv4_5
->core_cache
->reg_list
[ARMV4_5_CPSR
].value
, 29, 1);
136 if (variant
== 0) /* 32-bit immediate */
138 return_value
= shifter_operand
.immediate
.immediate
;
140 else if (variant
== 1) /* immediate shift */
142 u32 Rm
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, shifter_operand
.immediate_shift
.Rm
).value
, 0, 32);
144 /* adjust RM in case the PC is being read */
145 if (shifter_operand
.immediate_shift
.Rm
== 15)
146 Rm
+= 2 * instruction_size
;
148 return_value
= arm_shift(shifter_operand
.immediate_shift
.shift
, Rm
, shifter_operand
.immediate_shift
.shift_imm
, shifter_carry_out
);
150 else if (variant
== 2) /* register shift */
152 u32 Rm
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, shifter_operand
.register_shift
.Rm
).value
, 0, 32);
153 u32 Rs
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, shifter_operand
.register_shift
.Rs
).value
, 0, 32);
155 /* adjust RM in case the PC is being read */
156 if (shifter_operand
.register_shift
.Rm
== 15)
157 Rm
+= 2 * instruction_size
;
159 return_value
= arm_shift(shifter_operand
.immediate_shift
.shift
, Rm
, Rs
, shifter_carry_out
);
163 ERROR("BUG: shifter_operand.variant not 0, 1 or 2");
164 return_value
= 0xffffffff;
170 int pass_condition(u32 cpsr
, u32 opcode
)
172 switch ((opcode
& 0xf0000000) >> 28)
175 if (cpsr
& 0x40000000)
180 if (!(cpsr
& 0x40000000))
185 if (cpsr
& 0x20000000)
190 if (!(cpsr
& 0x20000000))
195 if (cpsr
& 0x80000000)
200 if (!(cpsr
& 0x80000000))
205 if (cpsr
& 0x10000000)
210 if (!(cpsr
& 0x10000000))
215 if ((cpsr
& 0x20000000) && !(cpsr
& 0x40000000))
220 if (!(cpsr
& 0x20000000) || (cpsr
& 0x40000000))
225 if (((cpsr
& 0x80000000) && (cpsr
& 0x10000000))
226 || (!(cpsr
& 0x80000000) && !(cpsr
& 0x10000000)))
231 if (((cpsr
& 0x80000000) && !(cpsr
& 0x10000000))
232 || (!(cpsr
& 0x80000000) && (cpsr
& 0x10000000)))
237 if (!(cpsr
& 0x40000000) &&
238 (((cpsr
& 0x80000000) && (cpsr
& 0x10000000))
239 || (!(cpsr
& 0x80000000) && !(cpsr
& 0x10000000))))
244 if ((cpsr
& 0x40000000) &&
245 (((cpsr
& 0x80000000) && !(cpsr
& 0x10000000))
246 || (!(cpsr
& 0x80000000) && (cpsr
& 0x10000000))))
256 ERROR("BUG: should never get here");
260 int thumb_pass_branch_condition(u32 cpsr
, u16 opcode
)
262 return pass_condition(cpsr
, (opcode
& 0x0f00) << 20);
265 /* simulate a single step (if possible)
266 * if the dry_run_pc argument is provided, no state is changed,
267 * but the new pc is stored in the variable pointed at by the argument
269 int arm_simulate_step(target_t
*target
, u32
*dry_run_pc
)
271 armv4_5_common_t
*armv4_5
= target
->arch_info
;
273 u32 current_pc
= buf_get_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32);
274 arm_instruction_t instruction
;
275 int instruction_size
;
277 if (armv4_5
->core_state
== ARMV4_5_STATE_ARM
)
279 /* get current instruction, and identify it */
280 target_read_u32(target
, current_pc
, &opcode
);
281 arm_evaluate_opcode(opcode
, current_pc
, &instruction
);
282 instruction_size
= 4;
284 /* check condition code (for all instructions) */
285 if (!pass_condition(buf_get_u32(armv4_5
->core_cache
->reg_list
[ARMV4_5_CPSR
].value
, 0, 32), opcode
))
289 *dry_run_pc
= current_pc
+ instruction_size
;
293 buf_set_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32, current_pc
+ instruction_size
);
301 target_read_u32(target
, current_pc
, &opcode
);
302 arm_evaluate_opcode(opcode
, current_pc
, &instruction
);
303 instruction_size
= 2;
305 /* check condition code (only for branch instructions) */
306 if ((!thumb_pass_branch_condition(buf_get_u32(armv4_5
->core_cache
->reg_list
[ARMV4_5_CPSR
].value
, 0, 32), opcode
)) &&
307 (instruction
.type
== ARM_B
))
311 *dry_run_pc
= current_pc
+ instruction_size
;
315 buf_set_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32, current_pc
+ instruction_size
);
322 /* examine instruction type */
324 /* branch instructions */
325 if ((instruction
.type
>= ARM_B
) && (instruction
.type
<= ARM_BLX
))
329 if (instruction
.info
.b_bl_bx_blx
.reg_operand
== -1)
331 target
= instruction
.info
.b_bl_bx_blx
.target_address
;
335 target
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.b_bl_bx_blx
.reg_operand
).value
, 0, 32);
340 *dry_run_pc
= target
;
345 if (instruction
.type
== ARM_B
)
347 buf_set_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32, target
);
349 else if (instruction
.type
== ARM_BL
)
351 u32 old_pc
= buf_get_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32);
352 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, 14).value
, 0, 32, old_pc
+ 4);
353 buf_set_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32, target
);
355 else if (instruction
.type
== ARM_BX
)
359 armv4_5
->core_state
= ARMV4_5_STATE_THUMB
;
363 armv4_5
->core_state
= ARMV4_5_STATE_ARM
;
365 buf_set_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32, target
& 0xfffffffe);
367 else if (instruction
.type
== ARM_BLX
)
369 u32 old_pc
= buf_get_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32);
370 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, 14).value
, 0, 32, old_pc
+ 4);
374 armv4_5
->core_state
= ARMV4_5_STATE_THUMB
;
378 armv4_5
->core_state
= ARMV4_5_STATE_ARM
;
380 buf_set_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32, target
& 0xfffffffe);
386 /* data processing instructions, except compare instructions (CMP, CMN, TST, TEQ) */
387 else if (((instruction
.type
>= ARM_AND
) && (instruction
.type
<= ARM_RSC
))
388 || ((instruction
.type
>= ARM_ORR
) && (instruction
.type
<= ARM_MVN
)))
390 u32 Rd
, Rn
, shifter_operand
;
391 u8 C
= buf_get_u32(armv4_5
->core_cache
->reg_list
[ARMV4_5_CPSR
].value
, 29, 1);
395 Rn
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.data_proc
.Rn
).value
, 0, 32);
396 shifter_operand
= arm_shifter_operand(armv4_5
, instruction
.info
.data_proc
.variant
, instruction
.info
.data_proc
.shifter_operand
, &carry_out
);
398 /* adjust Rn in case the PC is being read */
399 if (instruction
.info
.data_proc
.Rn
== 15)
400 Rn
+= 2 * instruction_size
;
402 if (instruction
.type
== ARM_AND
)
403 Rd
= Rn
& shifter_operand
;
404 else if (instruction
.type
== ARM_EOR
)
405 Rd
= Rn
^ shifter_operand
;
406 else if (instruction
.type
== ARM_SUB
)
407 Rd
= Rn
- shifter_operand
;
408 else if (instruction
.type
== ARM_RSB
)
409 Rd
= shifter_operand
- Rn
;
410 else if (instruction
.type
== ARM_ADD
)
411 Rd
= Rn
+ shifter_operand
;
412 else if (instruction
.type
== ARM_ADC
)
413 Rd
= Rn
+ shifter_operand
+ (C
& 1);
414 else if (instruction
.type
== ARM_SBC
)
415 Rd
= Rn
- shifter_operand
- (C
& 1) ? 0 : 1;
416 else if (instruction
.type
== ARM_RSC
)
417 Rd
= shifter_operand
- Rn
- (C
& 1) ? 0 : 1;
418 else if (instruction
.type
== ARM_ORR
)
419 Rd
= Rn
| shifter_operand
;
420 else if (instruction
.type
== ARM_BIC
)
421 Rd
= Rn
& ~(shifter_operand
);
422 else if (instruction
.type
== ARM_MOV
)
423 Rd
= shifter_operand
;
424 else if (instruction
.type
== ARM_MVN
)
425 Rd
= ~shifter_operand
;
429 if (instruction
.info
.data_proc
.Rd
== 15)
436 *dry_run_pc
= current_pc
+ instruction_size
;
443 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.data_proc
.Rd
).value
, 0, 32, Rd
);
444 WARNING("no updating of flags yet");
446 if (instruction
.info
.data_proc
.Rd
== 15)
450 /* compare instructions (CMP, CMN, TST, TEQ) */
451 else if ((instruction
.type
>= ARM_TST
) && (instruction
.type
<= ARM_CMN
))
455 *dry_run_pc
= current_pc
+ instruction_size
;
460 WARNING("no updating of flags yet");
463 /* load register instructions */
464 else if ((instruction
.type
>= ARM_LDR
) && (instruction
.type
<= ARM_LDRSH
))
466 u32 load_address
, modified_address
, load_value
;
467 u32 Rn
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store
.Rn
).value
, 0, 32);
469 /* adjust Rn in case the PC is being read */
470 if (instruction
.info
.load_store
.Rn
== 15)
471 Rn
+= 2 * instruction_size
;
473 if (instruction
.info
.load_store
.offset_mode
== 0)
475 if (instruction
.info
.load_store
.U
)
476 modified_address
= Rn
+ instruction
.info
.load_store
.offset
.offset
;
478 modified_address
= Rn
- instruction
.info
.load_store
.offset
.offset
;
480 else if (instruction
.info
.load_store
.offset_mode
== 1)
483 u32 Rm
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store
.offset
.reg
.Rm
).value
, 0, 32);
484 u8 shift
= instruction
.info
.load_store
.offset
.reg
.shift
;
485 u8 shift_imm
= instruction
.info
.load_store
.offset
.reg
.shift_imm
;
486 u8 carry
= buf_get_u32(armv4_5
->core_cache
->reg_list
[ARMV4_5_CPSR
].value
, 29, 1);
488 offset
= arm_shift(shift
, Rm
, shift_imm
, &carry
);
490 if (instruction
.info
.load_store
.U
)
491 modified_address
= Rn
+ offset
;
493 modified_address
= Rn
- offset
;
497 ERROR("BUG: offset_mode neither 0 (offset) nor 1 (scaled register)");
500 if (instruction
.info
.load_store
.index_mode
== 0)
503 * we load from the modified address, but don't change the base address register */
504 load_address
= modified_address
;
505 modified_address
= Rn
;
507 else if (instruction
.info
.load_store
.index_mode
== 1)
510 * we load from the modified address, and write it back to the base address register */
511 load_address
= modified_address
;
513 else if (instruction
.info
.load_store
.index_mode
== 2)
516 * we load from the unmodified address, and write the modified address back */
520 target_read_u32(target
, load_address
, &load_value
);
524 if (instruction
.info
.load_store
.Rd
== 15)
526 *dry_run_pc
= load_value
;
531 *dry_run_pc
= current_pc
+ instruction_size
;
538 if ((instruction
.info
.load_store
.index_mode
== 1) ||
539 (instruction
.info
.load_store
.index_mode
== 2))
541 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store
.Rn
).value
, 0, 32, modified_address
);
543 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store
.Rd
).value
, 0, 32, load_value
);
545 if (instruction
.info
.load_store
.Rd
== 15)
549 /* load multiple instruction */
550 else if (instruction
.type
== ARM_LDM
)
553 u32 Rn
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store_multiple
.Rn
).value
, 0, 32);
557 for (i
= 0; i
< 16; i
++)
559 if (instruction
.info
.load_store_multiple
.register_list
& (1 << i
))
563 switch (instruction
.info
.load_store_multiple
.addressing_mode
)
565 case 0: /* Increment after */
568 case 1: /* Increment before */
571 case 2: /* Decrement after */
572 Rn
= Rn
- (bits_set
* 4) + 4;
574 case 3: /* Decrement before */
575 Rn
= Rn
- (bits_set
* 4);
579 for (i
= 0; i
< 16; i
++)
581 if (instruction
.info
.load_store_multiple
.register_list
& (1 << i
))
583 target_read_u32(target
, Rn
, &load_values
[i
]);
590 if (instruction
.info
.load_store_multiple
.register_list
& 0x8000)
592 *dry_run_pc
= load_values
[15];
598 enum armv4_5_mode mode
= armv4_5
->core_mode
;
601 if (instruction
.info
.load_store_multiple
.S
)
603 if (instruction
.info
.load_store_multiple
.register_list
& 0x8000)
606 mode
= ARMV4_5_MODE_USR
;
609 for (i
= 0; i
< 16; i
++)
611 if (instruction
.info
.load_store_multiple
.register_list
& (1 << i
))
613 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, mode
, i
).value
, 0, 32, load_values
[i
]);
619 u32 spsr
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, 16).value
, 0, 32);
620 buf_set_u32(armv4_5
->core_cache
->reg_list
[ARMV4_5_CPSR
].value
, 0, 32, spsr
);
623 /* base register writeback */
624 if (instruction
.info
.load_store_multiple
.W
)
625 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store_multiple
.Rn
).value
, 0, 32, Rn
);
627 if (instruction
.info
.load_store_multiple
.register_list
& 0x8000)
631 /* store multiple instruction */
632 else if (instruction
.type
== ARM_STM
)
638 /* STM wont affect PC (advance by instruction size */
642 u32 Rn
= buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store_multiple
.Rn
).value
, 0, 32);
644 enum armv4_5_mode mode
= armv4_5
->core_mode
;
646 for (i
= 0; i
< 16; i
++)
648 if (instruction
.info
.load_store_multiple
.register_list
& (1 << i
))
652 if (instruction
.info
.load_store_multiple
.S
)
654 mode
= ARMV4_5_MODE_USR
;
657 switch (instruction
.info
.load_store_multiple
.addressing_mode
)
659 case 0: /* Increment after */
662 case 1: /* Increment before */
665 case 2: /* Decrement after */
666 Rn
= Rn
- (bits_set
* 4) + 4;
668 case 3: /* Decrement before */
669 Rn
= Rn
- (bits_set
* 4);
673 for (i
= 0; i
< 16; i
++)
675 if (instruction
.info
.load_store_multiple
.register_list
& (1 << i
))
677 target_write_u32(target
, Rn
, buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, i
).value
, 0, 32));
682 /* base register writeback */
683 if (instruction
.info
.load_store_multiple
.W
)
684 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5
->core_cache
, armv4_5
->core_mode
, instruction
.info
.load_store_multiple
.Rn
).value
, 0, 32, Rn
);
688 else if (!dry_run_pc
)
690 /* the instruction wasn't handled, but we're supposed to simulate it
692 return ERROR_ARM_SIMULATOR_NOT_IMPLEMENTED
;
697 *dry_run_pc
= current_pc
+ instruction_size
;
702 buf_set_u32(armv4_5
->core_cache
->reg_list
[15].value
, 0, 32, current_pc
+ instruction_size
);
Linking to existing account procedure
If you already have an account and want to add another login method,
you MUST first sign in with your existing account and
then change the URL to read
https://review.openocd.org/login/?link
to reach this page again; this time it will work for linking. Thank you.
SSH host keys fingerprints
1024 SHA256:YKx8b7u5ZWdcbp7/4AeXNaqElP49m6QrwfXaqQGJAOk gerrit-code-review@openocd.zylin.com (DSA)
384 SHA256:jHIbSQa4REvwCFG4cq5LBlBLxmxSqelQPem/EXIrxjk gerrit-code-review@openocd.org (ECDSA)
521 SHA256:UAOPYkU9Fjtcao0Ul/Rrlnj/OsQvt+pgdYSZ4jOYdgs gerrit-code-review@openocd.org (ECDSA)
256 SHA256:A13M5QlnozFOvTllybRZH6vm7iSt0XLxbA48yfc2yfY gerrit-code-review@openocd.org (ECDSA)
256 SHA256:spYMBqEYoAOtK7yZBrcwE8ZpYt6b68Cfh9yEVetvbXg gerrit-code-review@openocd.org (ED25519)
+--[ED25519 256]--+
|=.. |
|+o.. . |
|*.o . . |
|+B . . . |
|Bo. = o S |
|Oo.+ + = |
|oB=.* = . o |
| =+=.+ + E |
|. .=o . o |
+----[SHA256]-----+
2048 SHA256:0Onrb7/PHjpo6iVZ7xQX2riKN83FJ3KGU0TvI0TaFG4 gerrit-code-review@openocd.zylin.com (RSA)