diff --git a/arch/powerpc/lib/Makefile b/arch/powerpc/lib/Makefile
index 2c56f4636c2b..3c3146ba62da 100644
--- a/arch/powerpc/lib/Makefile
+++ b/arch/powerpc/lib/Makefile
@@ -37,7 +37,7 @@ obj-$(CONFIG_PPC_LIB_RHEAP) += rheap.o
 
 obj-$(CONFIG_FTR_FIXUP_SELFTEST) += feature-fixups-test.o
 
-obj-$(CONFIG_ALTIVEC)	+= xor_vmx.o
+obj-$(CONFIG_ALTIVEC)	+= xor_vmx.o xor_vmx_glue.o
 CFLAGS_xor_vmx.o += -maltivec $(call cc-option,-mabi=altivec)
 
 obj-$(CONFIG_PPC64) += $(obj64-y)
diff --git a/arch/powerpc/lib/xor_vmx.c b/arch/powerpc/lib/xor_vmx.c
index f9de69a04e88..4df240aa5f81 100644
--- a/arch/powerpc/lib/xor_vmx.c
+++ b/arch/powerpc/lib/xor_vmx.c
@@ -29,10 +29,7 @@
 #define vector __attribute__((vector_size(16)))
 #endif
 
-#include <linux/preempt.h>
-#include <linux/export.h>
-#include <linux/sched.h>
-#include <asm/switch_to.h>
+#include "xor_vmx.h"
 
 typedef vector signed char unative_t;
 
@@ -64,16 +61,13 @@
 		V1##_3 = vec_xor(V1##_3, V2##_3);	\
 	} while (0)
 
-void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in)
+void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -83,23 +77,16 @@ void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
 		v1 += 4;
 		v2 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_2);
 
-void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in, unsigned long *v3_in)
+void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
 	DEFINE(v3);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -112,15 +99,11 @@ void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
 		v2 += 4;
 		v3 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_3);
 
-void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in, unsigned long *v3_in,
-		   unsigned long *v4_in)
+void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in,
+		     unsigned long *v4_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
@@ -128,9 +111,6 @@ void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
 	DEFINE(v4);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -146,15 +126,11 @@ void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
 		v3 += 4;
 		v4 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_4);
 
-void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in, unsigned long *v3_in,
-		   unsigned long *v4_in, unsigned long *v5_in)
+void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in,
+		     unsigned long *v4_in, unsigned long *v5_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
@@ -163,9 +139,6 @@ void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
 	DEFINE(v5);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -184,8 +157,4 @@ void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
 		v4 += 4;
 		v5 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_5);
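Note (illustrative, not part of the patch): with the enable/disable calls removed, the renamed __xor_altivec_*() routines must only ever be reached through the new glue wrappers introduced below. Functionally, each routine simply XORs the source buffers into v1_in in place, 64 bytes (four 16-byte vectors) per loop iteration. A minimal scalar sketch of the two-source case, assuming bytes is a non-zero multiple of the 64-byte block size:

/* Hypothetical scalar equivalent of __xor_altivec_2(), for illustration
 * only: XOR v2 into v1 in place.  The VMX version does the same work with
 * vec_xor() on four 16-byte vectors per loop iteration.
 */
static void xor_2_scalar_ref(unsigned long bytes, unsigned long *v1,
			     const unsigned long *v2)
{
	unsigned long i;

	for (i = 0; i < bytes / sizeof(unsigned long); i++)
		v1[i] ^= v2[i];
}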
diff --git a/arch/powerpc/lib/xor_vmx.h b/arch/powerpc/lib/xor_vmx.h
new file mode 100644
index 000000000000..4746708451ae
--- /dev/null
+++ b/arch/powerpc/lib/xor_vmx.h
@@ -0,0 +1,20 @@
+/*
+ * Simple interface to link xor_vmx.c and xor_vmx_glue.c
+ *
+ * Separating these files ensures that no altivec instructions are run
+ * outside of the enable/disable altivec block.
+ */
+
+void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in);
+
+void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in);
+
+void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in,
+		     unsigned long *v4_in);
+
+void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in,
+		     unsigned long *v4_in, unsigned long *v5_in);
diff --git a/arch/powerpc/lib/xor_vmx_glue.c b/arch/powerpc/lib/xor_vmx_glue.c
new file mode 100644
index 000000000000..6521fe5e8cef
--- /dev/null
+++ b/arch/powerpc/lib/xor_vmx_glue.c
@@ -0,0 +1,62 @@
+/*
+ * Altivec XOR operations
+ *
+ * Copyright 2017 IBM Corp.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version
+ * 2 of the License, or (at your option) any later version.
+ */
+
+#include <linux/preempt.h>
+#include <linux/export.h>
+#include <linux/sched.h>
+#include <asm/switch_to.h>
+#include "xor_vmx.h"
+
+void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
+		   unsigned long *v2_in)
+{
+	preempt_disable();
+	enable_kernel_altivec();
+	__xor_altivec_2(bytes, v1_in, v2_in);
+	disable_kernel_altivec();
+	preempt_enable();
+}
+EXPORT_SYMBOL(xor_altivec_2);
+
+void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
+		   unsigned long *v2_in, unsigned long *v3_in)
+{
+	preempt_disable();
+	enable_kernel_altivec();
+	__xor_altivec_3(bytes, v1_in, v2_in, v3_in);
+	disable_kernel_altivec();
+	preempt_enable();
+}
+EXPORT_SYMBOL(xor_altivec_3);
+
+void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
+		   unsigned long *v2_in, unsigned long *v3_in,
+		   unsigned long *v4_in)
+{
+	preempt_disable();
+	enable_kernel_altivec();
+	__xor_altivec_4(bytes, v1_in, v2_in, v3_in, v4_in);
+	disable_kernel_altivec();
+	preempt_enable();
+}
+EXPORT_SYMBOL(xor_altivec_4);
+
+void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
+		   unsigned long *v2_in, unsigned long *v3_in,
+		   unsigned long *v4_in, unsigned long *v5_in)
+{
+	preempt_disable();
+	enable_kernel_altivec();
+	__xor_altivec_5(bytes, v1_in, v2_in, v3_in, v4_in, v5_in);
+	disable_kernel_altivec();
+	preempt_enable();
+}
+EXPORT_SYMBOL(xor_altivec_5);
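Note (illustrative, not part of the patch): the exported xor_altivec_*() wrappers keep the same prototypes as before, so existing callers are unaffected. On powerpc they are typically consumed through the kernel's RAID xor template machinery; a sketch of that wiring, assuming the struct xor_block_template layout from include/linux/raid/xor.h with the (unsigned long, unsigned long *, ...) function-pointer signatures used here:

/* Hypothetical registration sketch, mirroring what arch xor.h headers
 * usually do.  The xor layer benchmarks each registered template and
 * picks the fastest, calling through the glue wrappers above so that
 * AltiVec is always enabled around the VMX code.
 */
static struct xor_block_template xor_block_altivec = {
	.name = "altivec",
	.do_2 = xor_altivec_2,
	.do_3 = xor_altivec_3,
	.do_4 = xor_altivec_4,
	.do_5 = xor_altivec_5,
};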