Diffstat (limited to 'ARMeilleure/Instructions')
-rw-r--r--  ARMeilleure/Instructions/InstEmitSimdLogical.cs   | 33
-rw-r--r--  ARMeilleure/Instructions/InstEmitSimdLogical32.cs |  7
-rw-r--r--  ARMeilleure/Instructions/InstEmitSimdMove32.cs    |  9
3 files changed, 47 insertions(+), 2 deletions(-)
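
Note (not part of the diff): VPTERNLOGD evaluates an arbitrary three-input boolean function bit-by-bit, selected by an 8-bit truth-table immediate. Bit i of the immediate is the output for input combination i = (a << 2) | (b << 1) | c, giving the canonical operand masks a = 0b11110000, b = 0b11001100, c = 0b10101010. Assuming the intrinsic's first and second vector arguments map to the b and c columns, as the immediates in this commit suggest, the constants below can be derived like so:

    // Minimal sketch, not repository code: deriving the VPTERNLOGD
    // truth-table immediates used in this commit. The instruction only
    // consumes the low 8 bits of the constant.
    const int B = 0b11001100; // column of the first vector argument
    const int C = 0b10101010; // column of the second vector argument

    int notImm = ~C & 0xFF;       // 0b01010101 -> NOT, used by Not_V and Vmvn_I
    int ornImm = (B | ~C) & 0xFF; // 0b11011101 -> ORN (n | ~m), used by Orn_V
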
diff --git a/ARMeilleure/Instructions/InstEmitSimdLogical.cs b/ARMeilleure/Instructions/InstEmitSimdLogical.cs
index 8ca81580..2bf531e6 100644
--- a/ARMeilleure/Instructions/InstEmitSimdLogical.cs
+++ b/ARMeilleure/Instructions/InstEmitSimdLogical.cs
@@ -254,7 +254,22 @@ namespace ARMeilleure.Instructions
         public static void Not_V(ArmEmitterContext context)
         {
-            if (Optimizations.UseSse2)
+            if (Optimizations.UseAvx512Ortho)
+            {
+                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
+
+                Operand n = GetVec(op.Rn);
+
+                Operand res = context.AddIntrinsic(Intrinsic.X86Vpternlogd, n, n, Const(~0b10101010));
+
+                if (op.RegisterSize == RegisterSize.Simd64)
+                {
+                    res = context.VectorZeroUpper64(res);
+                }
+
+                context.Copy(GetVec(op.Rd), res);
+            }
+            else if (Optimizations.UseSse2)
             {
                 OpCodeSimd op = (OpCodeSimd)context.CurrOp;
@@ -283,6 +298,22 @@ namespace ARMeilleure.Instructions
             {
                 InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64OrnV);
             }
+            else if (Optimizations.UseAvx512Ortho)
+            {
+                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
+
+                Operand n = GetVec(op.Rn);
+                Operand m = GetVec(op.Rm);
+
+                Operand res = context.AddIntrinsic(Intrinsic.X86Vpternlogd, n, m, Const(0b11001100 | ~0b10101010));
+
+                if (op.RegisterSize == RegisterSize.Simd64)
+                {
+                    res = context.VectorZeroUpper64(res);
+                }
+
+                context.Copy(GetVec(op.Rd), res);
+            }
             else if (Optimizations.UseSse2)
             {
                 OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
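
A quick exhaustive check (hypothetical code, not in the repository) that the ORN immediate above encodes n | ~m for all eight input combinations:

    // Hypothetical self-check: walk the 8-entry truth table of the
    // immediate and compare it against ORN computed directly.
    int imm = 0b11001100 | ~0b10101010;
    for (int i = 0; i < 8; i++)
    {
        int n = (i >> 1) & 1;       // input column selected by 0b11001100
        int m = i & 1;              // input column selected by 0b10101010
        int expected = n | (m ^ 1); // ORN: n | NOT m
        System.Diagnostics.Debug.Assert(((imm >> i) & 1) == expected);
    }
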
diff --git a/ARMeilleure/Instructions/InstEmitSimdLogical32.cs b/ARMeilleure/Instructions/InstEmitSimdLogical32.cs
index c2a04778..68ef4ed1 100644
--- a/ARMeilleure/Instructions/InstEmitSimdLogical32.cs
+++ b/ARMeilleure/Instructions/InstEmitSimdLogical32.cs
@@ -151,6 +151,13 @@ namespace ARMeilleure.Instructions
             {
                 InstEmitSimdHelper32Arm64.EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.Arm64OrnV | Intrinsic.Arm64V128, n, m));
             }
+            else if (Optimizations.UseAvx512Ortho)
+            {
+                EmitVectorBinaryOpSimd32(context, (n, m) =>
+                {
+                    return context.AddIntrinsic(Intrinsic.X86Vpternlogd, n, m, Const(0b11001100 | ~0b10101010));
+                });
+            }
             else if (Optimizations.UseSse2)
             {
                 Operand mask = context.VectorOne();
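
For contrast with the one-instruction AVX-512 path: SSE2 has no single-instruction vector NOT, so the fallback that begins at the context line above builds ORN from an all-ones mask plus two operations. A sketch of that shape (reconstructed from the visible line, not the verbatim repository code):

    // Sketch only, under the assumption that the truncated SSE2 body
    // follows the usual PANDN + POR pattern:
    Operand mask = context.VectorOne();                               // all-ones vector
    Operand notM = context.AddIntrinsic(Intrinsic.X86Pandn, m, mask); // PANDN: ~m & mask
    Operand res  = context.AddIntrinsic(Intrinsic.X86Por, notM, n);   // n | ~m
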
diff --git a/ARMeilleure/Instructions/InstEmitSimdMove32.cs b/ARMeilleure/Instructions/InstEmitSimdMove32.cs
index 17100eb9..b8b91b31 100644
--- a/ARMeilleure/Instructions/InstEmitSimdMove32.cs
+++ b/ARMeilleure/Instructions/InstEmitSimdMove32.cs
@@ -34,7 +34,14 @@ namespace ARMeilleure.Instructions
         public static void Vmvn_I(ArmEmitterContext context)
         {
-            if (Optimizations.UseSse2)
+            if (Optimizations.UseAvx512Ortho)
+            {
+                EmitVectorUnaryOpSimd32(context, (op1) =>
+                {
+                    return context.AddIntrinsic(Intrinsic.X86Vpternlogd, op1, op1, Const(0b01010101));
+                });
+            }
+            else if (Optimizations.UseSse2)
             {
                 EmitVectorUnaryOpSimd32(context, (op1) =>
                 {
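
One last note (not part of the diff): Not_V spells its NOT immediate as ~0b10101010 while Vmvn_I spells it 0b01010101. The two agree in the low 8 bits, which is all VPTERNLOGD reads:

    // The two spellings encode the same truth table.
    System.Diagnostics.Debug.Assert((~0b10101010 & 0xFF) == 0b01010101);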