LoongArch: Add la_abs macro implementation

Use the "la_abs macro" instead of the "la.abs pseudo instruction" to
prepare for the subsequent PIE kernel. When PIE is not enabled, la_abs
is equivalent to la.abs.

Signed-off-by: Youling Tang <tangyouling@loongson.cn>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
Author:    Youling Tang <tangyouling@loongson.cn>
Committer: Huacai Chen <chenhuacai@loongson.cn>
Date:      2023-02-25 15:52:56 +08:00
Commit:    396233c650 (parent 8cbd5ebfe2)

4 changed files with 16 additions and 12 deletions
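The new la_abs wrapper changes nothing functionally yet; its value is that a single definition can later be overridden once the kernel becomes position-independent. As a rough illustration only (not part of this commit), a relocatable build might redefine the macro to emit a patchable absolute-address sequence and record its location for the early relocation code. The CONFIG_RELOCATABLE guard, the ".la_abs" section name and the fix-up record layout below are assumptions made for this sketch:

.macro la_abs reg, sym
#ifndef CONFIG_RELOCATABLE
	/* Non-PIE kernel: same as this commit, a plain absolute load */
	la.abs	\reg, \sym
#else
	/*
	 * Hypothetical PIE kernel: emit the 4-instruction absolute load
	 * with zero immediates, then record its address and the target
	 * symbol in a ".la_abs" table so boot-time relocation code can
	 * patch the real virtual address into the sequence.
	 */
766:
	lu12i.w	\reg, 0
	ori	\reg, \reg, 0
	lu32i.d	\reg, 0
	lu52i.d	\reg, \reg, 0
	.pushsection ".la_abs", "aw", %progbits
	.dword	766b
	.dword	\sym
	.popsection
#endif
.endm

With that indirection in place, the call sites below only need the mechanical la.abs -> la_abs rename.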

arch/loongarch/include/asm/asmmacro.h

@@ -274,4 +274,8 @@
 	nor	\dst, \src, zero
 .endm
 
+.macro la_abs reg, sym
+	la.abs	\reg, \sym
+.endm
+
 #endif /* _ASM_ASMMACRO_H */

arch/loongarch/include/asm/stackframe.h

@@ -86,7 +86,7 @@
  * new value in sp.
  */
 	.macro	get_saved_sp docfi=0
-	la.abs	t1, kernelsp
+	la_abs	t1, kernelsp
 #ifdef CONFIG_SMP
 	csrrd	t0, PERCPU_BASE_KS
 	LONG_ADD	t1, t1, t0

arch/loongarch/kernel/genex.S

@@ -34,7 +34,7 @@ SYM_FUNC_END(__arch_cpu_idle)
 SYM_FUNC_START(handle_vint)
 	BACKUP_T0T1
 	SAVE_ALL
-	la.abs	t1, __arch_cpu_idle
+	la_abs	t1, __arch_cpu_idle
 	LONG_L	t0, sp, PT_ERA
 	/* 32 byte rollback region */
 	ori	t0, t0, 0x1f
@@ -43,7 +43,7 @@ SYM_FUNC_START(handle_vint)
 	LONG_S	t0, sp, PT_ERA
 1:	move	a0, sp
 	move	a1, sp
-	la.abs	t0, do_vint
+	la_abs	t0, do_vint
 	jirl	ra, t0, 0
 	RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_vint)
@@ -72,7 +72,7 @@ SYM_FUNC_END(except_vec_cex)
 	SAVE_ALL
 	build_prep_\prep
 	move	a0, sp
-	la.abs	t0, do_\handler
+	la_abs	t0, do_\handler
 	jirl	ra, t0, 0
 	668:
 	RESTORE_ALL_AND_RET
@@ -93,6 +93,6 @@ SYM_FUNC_END(except_vec_cex)
 	BUILD_HANDLER reserved reserved none	/* others */
 
 SYM_FUNC_START(handle_sys)
-	la.abs	t0, handle_syscall
+	la_abs	t0, handle_syscall
 	jr	t0
 SYM_FUNC_END(handle_sys)

arch/loongarch/mm/tlbex.S

@@ -39,7 +39,7 @@ SYM_FUNC_START(handle_tlb_protect)
 	move	a1, zero
 	csrrd	a2, LOONGARCH_CSR_BADV
 	REG_S	a2, sp, PT_BVADDR
-	la.abs	t0, do_page_fault
+	la_abs	t0, do_page_fault
 	jirl	ra, t0, 0
 	RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_tlb_protect)
@@ -115,7 +115,7 @@ smp_pgtable_change_load:
 
 #ifdef CONFIG_64BIT
 vmalloc_load:
-	la.abs	t1, swapper_pg_dir
+	la_abs	t1, swapper_pg_dir
 	b	vmalloc_done_load
 #endif
 
@@ -186,7 +186,7 @@ tlb_huge_update_load:
 nopage_tlb_load:
 	dbar	0
 	csrrd	ra, EXCEPTION_KS2
-	la.abs	t0, tlb_do_page_fault_0
+	la_abs	t0, tlb_do_page_fault_0
 	jr	t0
 SYM_FUNC_END(handle_tlb_load)
 
@@ -262,7 +262,7 @@ smp_pgtable_change_store:
 
 #ifdef CONFIG_64BIT
 vmalloc_store:
-	la.abs	t1, swapper_pg_dir
+	la_abs	t1, swapper_pg_dir
 	b	vmalloc_done_store
 #endif
 
@@ -335,7 +335,7 @@ tlb_huge_update_store:
 nopage_tlb_store:
 	dbar	0
 	csrrd	ra, EXCEPTION_KS2
-	la.abs	t0, tlb_do_page_fault_1
+	la_abs	t0, tlb_do_page_fault_1
 	jr	t0
 SYM_FUNC_END(handle_tlb_store)
 
@@ -410,7 +410,7 @@ smp_pgtable_change_modify:
 
 #ifdef CONFIG_64BIT
 vmalloc_modify:
-	la.abs	t1, swapper_pg_dir
+	la_abs	t1, swapper_pg_dir
 	b	vmalloc_done_modify
 #endif
 
@@ -482,7 +482,7 @@ tlb_huge_update_modify:
 nopage_tlb_modify:
 	dbar	0
 	csrrd	ra, EXCEPTION_KS2
-	la.abs	t0, tlb_do_page_fault_1
+	la_abs	t0, tlb_do_page_fault_1
 	jr	t0
 SYM_FUNC_END(handle_tlb_modify)
 