path: root/ARMeilleure/Instructions/InstEmitSimdLogical.cs
author     gdkchan <gab.dark.100@gmail.com>          2023-01-10 19:16:59 -0300
committer  GitHub <noreply@github.com>               2023-01-10 19:16:59 -0300
commit     5e0f8e873857ce3ca3f532aff0936beb28e412c8 (patch)
tree       576e5110c076b7d1f4d94e608ee21493f5b48879 /ARMeilleure/Instructions/InstEmitSimdLogical.cs
parent     d16288a2a87f0979df30ba69d4fe10660177b6ac (diff)
Implement JIT Arm64 backend (#4114) (tag: 1.1.536)
* Implement JIT Arm64 backend
* PPTC version bump
* Address some feedback from Arm64 JIT PR
* Address even more PR feedback
* Remove unused IsPageAligned function
* Sync Qc flag before calls
* Fix comment and remove unused enum
* Address riperiperi PR feedback
* Delete Breakpoint IR instruction that was only implemented for Arm64
Diffstat (limited to 'ARMeilleure/Instructions/InstEmitSimdLogical.cs')
-rw-r--r--  ARMeilleure/Instructions/InstEmitSimdLogical.cs  54
1 file changed, 46 insertions, 8 deletions
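
Every emitter touched by this diff follows the same shape: check Optimizations.UseAdvSimd first and hand the work to InstEmitSimdHelperArm64 with the matching Arm64 intrinsic, otherwise fall through to the pre-existing SSE2 branch (and, below that, the generic fallback). The following is a minimal, self-contained sketch of that dispatch, not the real ARMeilleure code: the Optimizations flags, intrinsic names, and And_V shape mirror the diff, but the stub types and their members are invented here so the example compiles on its own.

    // Sketch only: stub types stand in for the real ARMeilleure classes.
    using System;

    enum Intrinsic { Arm64AndV, Arm64EorV, X86Pand, X86Pxor }

    static class Optimizations
    {
        // In ARMeilleure these flags come from host CPU feature detection.
        public static bool UseAdvSimd { get; set; } = false;
        public static bool UseSse2    { get; set; } = true;
    }

    class ArmEmitterContext
    {
        public void Emit(Intrinsic inst)    => Console.WriteLine($"emit {inst}");
        public void EmitFallback(string op) => Console.WriteLine($"emit generic {op}");
    }

    static class InstEmitSimdLogicalSketch
    {
        // Same three-tier dispatch the commit adds to And_V, Eor_V, Orr_V, etc.
        public static void And_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                context.Emit(Intrinsic.Arm64AndV);   // native Arm64 AND (vector)
            }
            else if (Optimizations.UseSse2)
            {
                context.Emit(Intrinsic.X86Pand);     // pre-existing SSE2 path
            }
            else
            {
                context.EmitFallback("and");         // element-wise IR fallback
            }
        }
    }

    class Program
    {
        static void Main()
        {
            var context = new ArmEmitterContext();
            InstEmitSimdLogicalSketch.And_V(context);   // prints "emit X86Pand" with the defaults above
        }
    }

With that pattern in mind, the hunks below are near-identical: binary logical ops (AND, BIC, EOR, ORN, ORR) go through EmitVectorBinaryOp, while the destination-read ops (BIF, BIT, BSL) go through EmitVectorTernaryOpRd.
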
diff --git a/ARMeilleure/Instructions/InstEmitSimdLogical.cs b/ARMeilleure/Instructions/InstEmitSimdLogical.cs
index 624ae841..8ca81580 100644
--- a/ARMeilleure/Instructions/InstEmitSimdLogical.cs
+++ b/ARMeilleure/Instructions/InstEmitSimdLogical.cs
@@ -14,7 +14,11 @@ namespace ARMeilleure.Instructions
{
public static void And_V(ArmEmitterContext context)
{
- if (Optimizations.UseSse2)
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64AndV);
+ }
+ else if (Optimizations.UseSse2)
{
OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
@@ -38,7 +42,11 @@ namespace ARMeilleure.Instructions
public static void Bic_V(ArmEmitterContext context)
{
- if (Optimizations.UseSse2)
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64BicV);
+ }
+ else if (Optimizations.UseSse2)
{
OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
@@ -98,12 +106,26 @@ namespace ARMeilleure.Instructions
public static void Bif_V(ArmEmitterContext context)
{
- EmitBifBit(context, notRm: true);
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64BifV);
+ }
+ else
+ {
+ EmitBifBit(context, notRm: true);
+ }
}
public static void Bit_V(ArmEmitterContext context)
{
- EmitBifBit(context, notRm: false);
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64BitV);
+ }
+ else
+ {
+ EmitBifBit(context, notRm: false);
+ }
}
private static void EmitBifBit(ArmEmitterContext context, bool notRm)
@@ -167,7 +189,11 @@ namespace ARMeilleure.Instructions
public static void Bsl_V(ArmEmitterContext context)
{
- if (Optimizations.UseSse2)
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64BslV);
+ }
+ else if (Optimizations.UseSse2)
{
OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
@@ -200,7 +226,11 @@ namespace ARMeilleure.Instructions
public static void Eor_V(ArmEmitterContext context)
{
- if (Optimizations.UseSse2)
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64EorV);
+ }
+ else if (Optimizations.UseSse2)
{
OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
@@ -249,7 +279,11 @@ namespace ARMeilleure.Instructions
public static void Orn_V(ArmEmitterContext context)
{
- if (Optimizations.UseSse2)
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64OrnV);
+ }
+ else if (Optimizations.UseSse2)
{
OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
@@ -280,7 +314,11 @@ namespace ARMeilleure.Instructions
public static void Orr_V(ArmEmitterContext context)
{
- if (Optimizations.UseSse2)
+ if (Optimizations.UseAdvSimd)
+ {
+ InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64OrrV);
+ }
+ else if (Optimizations.UseSse2)
{
OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;