Diffstat (limited to 'ARMeilleure/Instructions/InstEmitSimdLogical.cs')
-rw-r--r--  ARMeilleure/Instructions/InstEmitSimdLogical.cs  41
1 file changed, 33 insertions(+), 8 deletions(-)
diff --git a/ARMeilleure/Instructions/InstEmitSimdLogical.cs b/ARMeilleure/Instructions/InstEmitSimdLogical.cs
index dbd1a1a0..624ae841 100644
--- a/ARMeilleure/Instructions/InstEmitSimdLogical.cs
+++ b/ARMeilleure/Instructions/InstEmitSimdLogical.cs
@@ -336,20 +336,45 @@ namespace ARMeilleure.Instructions
         {
             OpCodeSimd op = (OpCodeSimd)context.CurrOp;
 
-            Operand res = context.VectorZero();
+            if (Optimizations.UseGfni)
+            {
+                const long bitMatrix =
+                    (0b10000000L << 56) |
+                    (0b01000000L << 48) |
+                    (0b00100000L << 40) |
+                    (0b00010000L << 32) |
+                    (0b00001000L << 24) |
+                    (0b00000100L << 16) |
+                    (0b00000010L << 8) |
+                    (0b00000001L << 0);
 
-            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
+                Operand vBitMatrix = X86GetAllElements(context, bitMatrix);
 
-            for (int index = 0; index < elems; index++)
-            {
-                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
+                Operand res = context.AddIntrinsic(Intrinsic.X86Gf2p8affineqb, GetVec(op.Rn), vBitMatrix, Const(0));
 
-                Operand de = EmitReverseBits8Op(context, ne);
+                if (op.RegisterSize == RegisterSize.Simd64)
+                {
+                    res = context.VectorZeroUpper64(res);
+                }
 
-                res = EmitVectorInsert(context, res, de, index, 0);
+                context.Copy(GetVec(op.Rd), res);
             }
+            else
+            {
+                Operand res = context.VectorZero();
+                int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
 
-            context.Copy(GetVec(op.Rd), res);
+                for (int index = 0; index < elems; index++)
+                {
+                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
+
+                    Operand de = EmitReverseBits8Op(context, ne);
+
+                    res = EmitVectorInsert(context, res, de, index, 0);
+                }
+
+                context.Copy(GetVec(op.Rd), res);
+            }
         }
 
         private static Operand EmitReverseBits8Op(ArmEmitterContext context, Operand op)
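
Note (not taken from the patch): the GFNI path relies on the x86 GF2P8AFFINEQB instruction, which treats each qword of the second operand as an 8x8 bit matrix and, per the documented definition, computes output bit i of each byte as the parity of (matrix byte 7-i AND source byte) XOR bit i of the immediate. The sketch below models that definition in plain C# and checks that the anti-diagonal bitMatrix built above turns the operation into a per-byte bit reversal, which is exactly what AArch64 RBIT requires of each vector element. Class and method names here are illustrative only.

using System;

static class Gf2p8AffineBitReverseDemo
{
    // Assumed scalar model of one byte of GF2P8AFFINEQB:
    // result bit i = parity(matrix byte [7 - i] AND source byte) XOR imm8 bit i.
    static byte AffineByte(ulong matrix, byte src, byte imm8)
    {
        byte result = 0;

        for (int i = 0; i < 8; i++)
        {
            int row = (byte)(matrix >> ((7 - i) * 8)); // matrix byte 7-i drives output bit i
            int parity = 0;

            for (int bit = 0; bit < 8; bit++)
            {
                parity ^= ((row & src) >> bit) & 1;    // XOR-reduce the masked bits
            }

            result |= (byte)((parity ^ ((imm8 >> i) & 1)) << i);
        }

        return result;
    }

    // The same anti-diagonal matrix the patch builds: byte 7 = 0x80, byte 6 = 0x40, ..., byte 0 = 0x01.
    const ulong BitMatrix =
        (0b10000000UL << 56) |
        (0b01000000UL << 48) |
        (0b00100000UL << 40) |
        (0b00010000UL << 32) |
        (0b00001000UL << 24) |
        (0b00000100UL << 16) |
        (0b00000010UL << 8) |
        (0b00000001UL << 0);

    static void Main()
    {
        // With this matrix, output bit 0 reads input bit 7, output bit 1 reads input bit 6,
        // and so on; with imm8 = 0 the transform is purely the matrix product.
        for (int b = 0; b <= 255; b++)
        {
            byte reversed = 0;

            for (int bit = 0; bit < 8; bit++)
            {
                reversed |= (byte)(((b >> bit) & 1) << (7 - bit));
            }

            if (AffineByte(BitMatrix, (byte)b, 0) != reversed)
            {
                Console.WriteLine($"mismatch for input {b}");
                return;
            }
        }

        Console.WriteLine("GF2P8AFFINEQB with the anti-diagonal matrix reverses the bits of every byte.");
    }
}

Because the immediate is zero, a single GF2P8AFFINEQB covers the whole vector at once, replacing the 8- or 16-iteration extract/reverse/insert loop of the fallback path.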
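
For comparison, a rough scalar model of the non-GFNI fallback path, assuming EmitVectorExtractZx, EmitReverseBits8Op and EmitVectorInsert behave as byte extract, 8-bit reversal and byte insert on a zeroed 128-bit destination; the real code emits IR operations rather than touching a byte array, and the helper names below are illustrative only.

using System;

static class RbitFallbackDemo
{
    // Hypothetical model of the else branch: reverse the bits of each byte element,
    // touching 8 elements for a 64-bit vector and 16 for a 128-bit one.
    static byte[] RbitVector(byte[] source, bool simd128)
    {
        byte[] res = new byte[16];            // context.VectorZero()
        int elems = simd128 ? 16 : 8;         // RegisterSize.Simd128 : RegisterSize.Simd64

        for (int index = 0; index < elems; index++)
        {
            byte ne = source[index];          // extract element 'index'
            byte de = 0;

            for (int bit = 0; bit < 8; bit++) // reverse the 8 bits of the byte
            {
                de |= (byte)(((ne >> bit) & 1) << (7 - bit));
            }

            res[index] = de;                  // insert into the destination
        }

        return res;                           // upper 64 bits stay zero in the Simd64 case
    }

    static void Main()
    {
        byte[] input = { 0b00000001, 0b10000000, 0xF0, 0x0F, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
        Console.WriteLine(BitConverter.ToString(RbitVector(input, simd128: false)));
        // Expected: 80-01-0F-F0-80-40-C0-20 followed by eight 00 bytes.
    }
}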