diff --git a/libcpu/aarch64/common/mmu.c b/libcpu/aarch64/common/mmu.c
index b02b42ff5705d3e5db05f5b02f499394a1c95b91..caabb59291879768630293323fd8f8243c841225 100644
--- a/libcpu/aarch64/common/mmu.c
+++ b/libcpu/aarch64/common/mmu.c
@@ -895,6 +895,9 @@ void rt_hw_mmu_setup_early(unsigned long *tbl0, unsigned long *tbl1, unsigned lo
     unsigned long count = (size + ARCH_SECTION_MASK) >> ARCH_SECTION_SHIFT;
     unsigned long normal_attr = MMU_MAP_CUSTOM(MMU_AP_KAUN, NORMAL_MEM);
 
+    mmu_memset((char *)tbl0, 0, sizeof(struct page_table));
+    mmu_memset((char *)tbl1, 0, sizeof(struct page_table));
+
     ret = armv8_init_map_2M(tbl1 , va, va + pv_off, count, normal_attr);
     if (ret != 0)
     {
diff --git a/libcpu/aarch64/cortex-a/entry_point.S b/libcpu/aarch64/cortex-a/entry_point.S
index 3c300bcae76433c800ab755c5d6b2d9ba326b3b5..5ed887cedbf3fe1ec941e749b219c13abd409ce2 100644
--- a/libcpu/aarch64/cortex-a/entry_point.S
+++ b/libcpu/aarch64/cortex-a/entry_point.S
@@ -88,15 +88,31 @@ __start:
     msr cpacr_el1, x1
 
     /* clear bss */
-    ldr x1, =__bss_start
-    add x1, x1, x9
-    ldr w2, =__bss_size
-
-.L__clean_bss_loop:
-    cbz w2, .L__jump_to_entry
-    str xzr, [x1], #8
-    sub w2, w2, #8
-    cbnz w2, .L__clean_bss_loop
+    ldr x0, =__bss_start
+    ldr x1, =__bss_end
+    ldr x2, =PV_OFFSET
+    add x0, x0, x2
+    add x1, x1, x2
+
+    sub x2, x1, x0
+    mov x3, x1
+    cmp x2, #7
+    bls .L__clean_bss_check
+
+.L__clean_bss_loop_quad:
+    str xzr, [x0], #8
+    sub x2, x3, x0
+    cmp x2, #7
+    bhi .L__clean_bss_loop_quad
+    cmp x1, x0
+    bls .L__jump_to_entry
+
+.L__clean_bss_loop_byte:
+    strb wzr, [x0], #1
+
+.L__clean_bss_check:
+    cmp x1, x0
+    bhi .L__clean_bss_loop_byte
 
 .L__jump_to_entry:
     /* jump to C code, should not return */