py/asm: Fix x86 and ARM assemblers due to recent code refactoring.

Author: Damien George
Date:   2016-12-09 22:54:45 +11:00
parent 155fdc74d5
commit 93ee6603b1

2 changed files with 10 additions and 10 deletions
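
The fix follows from the refactoring referenced in the commit message: the state shared by all of the native assemblers (current pass, code offset, label table) now lives in a common base structure embedded in each architecture-specific assembler, so fields previously reached as as->pass are now reached as as->base.pass. Below is a minimal sketch of that embedding pattern; the type and field names are assumptions inferred from the accesses visible in the diff, not a copy of the real py/asmbase.h.

    // Illustrative sketch only: names are inferred from as->base.pass,
    // as->base.label_offsets and as->base.code_offset in the diff below.
    #include <stddef.h>
    #include <stdint.h>

    typedef struct _mp_asm_base_t {
        int pass;               // which assembler pass is currently running
        size_t code_offset;     // current write position in the output buffer
        size_t max_num_labels;  // number of label slots allocated
        size_t *label_offsets;  // code offset recorded for each label
        uint8_t *code_base;     // start of the emitted machine code
    } mp_asm_base_t;

    // Each architecture-specific assembler embeds the base as its first
    // member, so shared code can operate on the common fields while the
    // ARM/x86 emitters keep their own state alongside.
    typedef struct _asm_arm_t {
        mp_asm_base_t base;
        uint32_t push_reglist;  // ARM-specific example fields (assumed)
        uint32_t stack_adjust;
    } asm_arm_t;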


@@ -39,7 +39,7 @@
 #define SIGNED_FIT24(x) (((x) & 0xff800000) == 0) || (((x) & 0xff000000) == 0xff000000)
 
 void asm_arm_end_pass(asm_arm_t *as) {
-    if (as->pass == ASM_ARM_PASS_EMIT) {
+    if (as->base.pass == ASM_ARM_PASS_EMIT) {
 #ifdef __arm__
         // flush I- and D-cache
         asm volatile(
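
For context on the check above: the assembler generates code in multiple passes, first sizing the code and resolving labels, then emitting the actual machine instructions, and end_pass is called at the end of each pass. Only after the final emit pass does the ARM build flush the data and instruction caches so the freshly written code can be executed. A toy sketch of that pass structure follows; the names and helpers here are illustrative, not MicroPython's actual API.

    // Toy sketch of a two-pass assembler driver; all names are assumptions.
    #include <stddef.h>
    #include <stdint.h>

    enum { PASS_COMPUTE = 1, PASS_EMIT = 2 };

    typedef struct _toy_asm_t {
        int pass;
        size_t code_offset;  // advanced by every "emitted" instruction
        uint8_t *code_base;  // only actually written during PASS_EMIT
    } toy_asm_t;

    static void toy_asm_end_pass(toy_asm_t *as) {
        if (as->pass == PASS_EMIT) {
            // On a real ARM target this is where the I- and D-caches would be
            // synchronised so the CPU fetches the new instructions.
        }
    }

    static void toy_assemble(toy_asm_t *as, void (*gen)(toy_asm_t *)) {
        // Pass 1: run the generator to measure size and record label offsets.
        as->pass = PASS_COMPUTE;
        as->code_offset = 0;
        gen(as);
        toy_asm_end_pass(as);

        // Pass 2: run it again, this time writing bytes into code_base.
        as->pass = PASS_EMIT;
        as->code_offset = 0;
        gen(as);
        toy_asm_end_pass(as);
    }
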
@@ -333,9 +333,9 @@ void asm_arm_strb_reg_reg_reg(asm_arm_t *as, uint rd, uint rm, uint rn) {
 }
 
 void asm_arm_bcc_label(asm_arm_t *as, int cond, uint label) {
-    assert(label < as->max_num_labels);
-    mp_uint_t dest = as->label_offsets[label];
-    mp_int_t rel = dest - as->code_offset;
+    assert(label < as->base.max_num_labels);
+    mp_uint_t dest = as->base.label_offsets[label];
+    mp_int_t rel = dest - as->base.code_offset;
     rel -= 8; // account for instruction prefetch, PC is 8 bytes ahead of this instruction
     rel >>= 2; // in ARM mode the branch target is 32-bit aligned, so the 2 LSB are omitted
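
The two comments at the end of the hunk capture the ARM branch-encoding arithmetic: a conditional branch holds a signed 24-bit offset measured in words, relative to a PC that has already advanced 8 bytes past the branch because of instruction prefetch. Below is a small, self-contained sketch of that calculation; the function and parameter names are illustrative, not taken from py/asmarm.c.

    // Hedged sketch of forming an ARM B<cond> instruction word from the
    // arithmetic shown above; names here are illustrative only.
    #include <assert.h>
    #include <stdint.h>

    // Does the word offset fit in a signed 24-bit immediate?
    #define FITS_S24(x) ((((x) & 0xff800000) == 0) || (((x) & 0xff000000) == 0xff000000))

    static uint32_t arm_bcc_word(uint32_t insn_offset, uint32_t dest_offset, uint32_t cond4) {
        int32_t rel = (int32_t)(dest_offset - insn_offset);
        rel -= 8;  // PC reads as this instruction's address + 8 (prefetch)
        rel >>= 2; // targets are word-aligned, so the 2 low bits are implicit
        assert(FITS_S24(rel));
        // cond in bits [31:28], branch opcode pattern 0b1010 (B) in bits [27:24],
        // signed word offset in bits [23:0]
        return (cond4 << 28) | (0xAu << 24) | ((uint32_t)rel & 0x00ffffff);
    }

    // Example: a branch at offset 0x20 to a label at offset 0x10 gives
    // rel = -0x18 bytes = -6 words, encoded as 0xfffffa in the low 24 bits.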