commit 41c066f2c4d436c535616fe182331766c57838f0 upstream
When CONFIG_RANDOMIZE_MODULE_REGION_FULL=y, the offset between loaded
modules and the core kernel may exceed 4 GB, putting symbols exported
by the core kernel out of the reach of the ordinary adrp/add instruction
pairs used to generate relative symbol references. So make the adr_l
macro emit a movz/movk sequence instead when executing in module context.
While at it, remove the pointless special case for the stack pointer.
Acked-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
[ dannf: backported to v4.4 by replacing the 3-arg adr_l macro in head.S
with its output, as this commit drops the 3-arg variant ]
Fixes: c042dd600f ("crypto: arm64/sha - avoid non-standard inline asm tricks")
Signed-off-by: dann frazier <dann.frazier@canonical.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
286 lines
6.3 KiB
C
286 lines
6.3 KiB
C
/*
|
|
* Based on arch/arm/include/asm/assembler.h
|
|
*
|
|
* Copyright (C) 1996-2000 Russell King
|
|
* Copyright (C) 2012 ARM Ltd.
|
|
*
|
|
* This program is free software; you can redistribute it and/or modify
|
|
* it under the terms of the GNU General Public License version 2 as
|
|
* published by the Free Software Foundation.
|
|
*
|
|
* This program is distributed in the hope that it will be useful,
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
* GNU General Public License for more details.
|
|
*
|
|
* You should have received a copy of the GNU General Public License
|
|
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
*/
|
|
#ifndef __ASSEMBLY__
|
|
#error "Only include this from assembly code"
|
|
#endif
|
|
|
|
#ifndef __ASM_ASSEMBLER_H
|
|
#define __ASM_ASSEMBLER_H
|
|
|
|
#include <asm/cputype.h>
|
|
#include <asm/ptrace.h>
|
|
#include <asm/thread_info.h>
|
|
|
|
/*
|
|
* Stack pushing/popping (register pairs only). Equivalent to store decrement
|
|
* before, load increment after.
|
|
*/
|
|
.macro push, xreg1, xreg2
|
|
stp \xreg1, \xreg2, [sp, #-16]!
|
|
.endm
|
|
|
|
.macro pop, xreg1, xreg2
|
|
ldp \xreg1, \xreg2, [sp], #16
|
|
.endm
|
|
|
|
/*
|
|
* Enable and disable interrupts.
|
|
*/
|
|
.macro disable_irq
|
|
msr daifset, #2
|
|
.endm
|
|
|
|
.macro enable_irq
|
|
msr daifclr, #2
|
|
.endm
|
|
|
|
/*
|
|
* Enable and disable debug exceptions.
|
|
*/
|
|
.macro disable_dbg
|
|
msr daifset, #8
|
|
.endm
|
|
|
|
.macro enable_dbg
|
|
msr daifclr, #8
|
|
.endm
|
|
|
|
.macro disable_step_tsk, flgs, tmp
|
|
tbz \flgs, #TIF_SINGLESTEP, 9990f
|
|
mrs \tmp, mdscr_el1
|
|
bic \tmp, \tmp, #1
|
|
msr mdscr_el1, \tmp
|
|
isb // Synchronise with enable_dbg
|
|
9990:
|
|
.endm
|
|
|
|
.macro enable_step_tsk, flgs, tmp
|
|
tbz \flgs, #TIF_SINGLESTEP, 9990f
|
|
disable_dbg
|
|
mrs \tmp, mdscr_el1
|
|
orr \tmp, \tmp, #1
|
|
msr mdscr_el1, \tmp
|
|
9990:
|
|
.endm
|
|
|
|
/*
|
|
* Enable both debug exceptions and interrupts. This is likely to be
|
|
* faster than two daifclr operations, since writes to this register
|
|
* are self-synchronising.
|
|
*/
|
|
.macro enable_dbg_and_irq
|
|
msr daifclr, #(8 | 2)
|
|
.endm
|
|
|
|
/*
|
|
* SMP data memory barrier
|
|
*/
|
|
.macro smp_dmb, opt
|
|
dmb \opt
|
|
.endm
|
|
|
|
#define USER(l, x...) \
|
|
9999: x; \
|
|
.section __ex_table,"a"; \
|
|
.align 3; \
|
|
.quad 9999b,l; \
|
|
.previous
|
|
|
|
/*
|
|
* Register aliases.
|
|
*/
|
|
lr .req x30 // link register
|
|
|
|
/*
|
|
* Vector entry
|
|
*/
|
|
.macro ventry label
|
|
.align 7
|
|
b \label
|
|
.endm
|
|
|
|
/*
|
|
* Select code when configured for BE.
|
|
*/
|
|
#ifdef CONFIG_CPU_BIG_ENDIAN
|
|
#define CPU_BE(code...) code
|
|
#else
|
|
#define CPU_BE(code...)
|
|
#endif
|
|
|
|
/*
|
|
* Select code when configured for LE.
|
|
*/
|
|
#ifdef CONFIG_CPU_BIG_ENDIAN
|
|
#define CPU_LE(code...)
|
|
#else
|
|
#define CPU_LE(code...) code
|
|
#endif
|
|
|
|
/*
|
|
* Define a macro that constructs a 64-bit value by concatenating two
|
|
* 32-bit registers. Note that on big endian systems the order of the
|
|
* registers is swapped.
|
|
*/
|
|
#ifndef CONFIG_CPU_BIG_ENDIAN
|
|
.macro regs_to_64, rd, lbits, hbits
|
|
#else
|
|
.macro regs_to_64, rd, hbits, lbits
|
|
#endif
|
|
orr \rd, \lbits, \hbits, lsl #32
|
|
.endm
|
|
|
|
/*
|
|
* Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
|
|
* <symbol> is within the range +/- 4 GB of the PC when running
|
|
* in core kernel context. In module context, a movz/movk sequence
|
|
* is used, since modules may be loaded far away from the kernel
|
|
* when KASLR is in effect.
|
|
*/
|
|
/*
|
|
* @dst: destination register (64 bit wide)
|
|
* @sym: name of the symbol
|
|
*/
|
|
.macro adr_l, dst, sym
|
|
#ifndef MODULE
|
|
adrp \dst, \sym
|
|
add \dst, \dst, :lo12:\sym
|
|
#else
|
|
movz \dst, #:abs_g3:\sym
|
|
movk \dst, #:abs_g2_nc:\sym
|
|
movk \dst, #:abs_g1_nc:\sym
|
|
movk \dst, #:abs_g0_nc:\sym
|
|
#endif
|
|
.endm
|
|
|
|
/*
|
|
* @dst: destination register (32 or 64 bit wide)
|
|
* @sym: name of the symbol
|
|
* @tmp: optional 64-bit scratch register to be used if <dst> is a
|
|
* 32-bit wide register, in which case it cannot be used to hold
|
|
* the address
|
|
*/
|
|
.macro ldr_l, dst, sym, tmp=
|
|
#ifndef MODULE
|
|
.ifb \tmp
|
|
adrp \dst, \sym
|
|
ldr \dst, [\dst, :lo12:\sym]
|
|
.else
|
|
adrp \tmp, \sym
|
|
ldr \dst, [\tmp, :lo12:\sym]
|
|
.endif
|
|
#else
|
|
.ifb \tmp
|
|
adr_l \dst, \sym
|
|
ldr \dst, [\dst]
|
|
.else
|
|
adr_l \tmp, \sym
|
|
ldr \dst, [\tmp]
|
|
.endif
|
|
#endif
|
|
.endm
|
|
|
|
/*
|
|
* @src: source register (32 or 64 bit wide)
|
|
* @sym: name of the symbol
|
|
* @tmp: mandatory 64-bit scratch register to calculate the address
|
|
* while <src> needs to be preserved.
|
|
*/
|
|
.macro str_l, src, sym, tmp
|
|
#ifndef MODULE
|
|
adrp \tmp, \sym
|
|
str \src, [\tmp, :lo12:\sym]
|
|
#else
|
|
adr_l \tmp, \sym
|
|
str \src, [\tmp]
|
|
#endif
|
|
.endm
|
|
|
|
/*
|
|
* Annotate a function as position independent, i.e., safe to be called before
|
|
* the kernel virtual mapping is activated.
|
|
*/
|
|
#define ENDPIPROC(x) \
|
|
.globl __pi_##x; \
|
|
.type __pi_##x, %function; \
|
|
.set __pi_##x, x; \
|
|
.size __pi_##x, . - x; \
|
|
ENDPROC(x)
|
|
|
|
/*
|
|
* mov_q - move an immediate constant into a 64-bit register using
|
|
* between 2 and 4 movz/movk instructions (depending on the
|
|
* magnitude and sign of the operand)
|
|
*/
|
|
.macro mov_q, reg, val
|
|
.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
|
|
movz \reg, :abs_g1_s:\val
|
|
.else
|
|
.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
|
|
movz \reg, :abs_g2_s:\val
|
|
.else
|
|
movz \reg, :abs_g3:\val
|
|
movk \reg, :abs_g2_nc:\val
|
|
.endif
|
|
movk \reg, :abs_g1_nc:\val
|
|
.endif
|
|
movk \reg, :abs_g0_nc:\val
|
|
.endm
|
|
|
|
/*
|
|
* Check the MIDR_EL1 of the current CPU for a given model and a range of
|
|
* variant/revision. See asm/cputype.h for the macros used below.
|
|
*
|
|
* model: MIDR_CPU_PART of CPU
|
|
* rv_min: Minimum of MIDR_CPU_VAR_REV()
|
|
* rv_max: Maximum of MIDR_CPU_VAR_REV()
|
|
* res: Result register.
|
|
* tmp1, tmp2, tmp3: Temporary registers
|
|
*
|
|
* Corrupts: res, tmp1, tmp2, tmp3
|
|
* Returns: 0, if the CPU id doesn't match. Non-zero otherwise
|
|
*/
|
|
.macro cpu_midr_match model, rv_min, rv_max, res, tmp1, tmp2, tmp3
|
|
mrs \res, midr_el1
|
|
mov_q \tmp1, (MIDR_REVISION_MASK | MIDR_VARIANT_MASK)
|
|
mov_q \tmp2, MIDR_CPU_PART_MASK
|
|
and \tmp3, \res, \tmp2 // Extract model
|
|
and \tmp1, \res, \tmp1 // rev & variant
|
|
mov_q \tmp2, \model
|
|
cmp \tmp3, \tmp2
|
|
cset \res, eq
|
|
cbz \res, .Ldone\@ // Model matches ?
|
|
|
|
.if (\rv_min != 0) // Skip min check if rv_min == 0
|
|
mov_q \tmp3, \rv_min
|
|
cmp \tmp1, \tmp3
|
|
cset \res, ge
|
|
.endif // \rv_min != 0
|
|
/* Skip rv_max check if rv_min == rv_max && rv_min != 0 */
|
|
.if ((\rv_min != \rv_max) || \rv_min == 0)
|
|
mov_q \tmp2, \rv_max
|
|
cmp \tmp1, \tmp2
|
|
cset \tmp2, le
|
|
and \res, \res, \tmp2
|
|
.endif
|
|
.Ldone\@:
|
|
.endm
|
|
|
|
#endif /* __ASM_ASSEMBLER_H */
|