author      TSRBerry <20988865+TSRBerry@users.noreply.github.com>    2023-06-26 07:25:06 +0200
committer   GitHub <noreply@github.com>    2023-06-26 07:25:06 +0200
commit      ff53dcf5607a82ad38388502b4cf5cc8cca77733 (patch)
tree        eef4e2781d078ca62eee5da4ace8ed3323914c4a /src/ARMeilleure/Instructions/InstEmitSimdCvt.cs
parent      2de78a2d55a1306761788570ab192897299c55d8 (diff)
[ARMeilleure] Address dotnet-format issues (#5357)    (tag: 1.1.923)
* dotnet format style --severity info
  Some changes were manually reverted.
* dotnet format analyzers --severity info
  Some changes have been minimally adapted.
* Restore a few unused methods and variables
* Silence dotnet format IDE0060 warnings
* Silence dotnet format IDE0052 warnings
* Address or silence dotnet format IDE1006 warnings
* Address or silence dotnet format CA2208 warnings
* Address dotnet format CA1822 warnings
* Address or silence dotnet format CA1069 warnings
* Silence CA1806 and CA1834 issues
* Address dotnet format CA1401 warnings
* Fix new dotnet-format issues after rebase
* Address review comments
* Address dotnet format CA2208 warnings properly
* Fix formatting for switch expressions
* Address most dotnet format whitespace warnings
* Apply dotnet format whitespace formatting
  A few of them have been manually reverted and the corresponding warning was silenced.
* Add previously silenced warnings back
  I have no clue how these disappeared.
* Revert formatting changes for OpCodeTable.cs
* Enable formatting for a few cases again
* Format if-blocks correctly
* Enable formatting for a few more cases again
* Fix inline comment alignment
* Run dotnet format after rebase and remove unused usings
  - analyzers
  - style
  - whitespace
* Disable 'prefer switch expression' rule
* Add comments to disabled warnings
* Remove a few unused parameters
* Adjust namespaces
* Simplify properties and array initialization, use const when possible, remove trailing commas
* Start working on disabled warnings
* Fix and silence a few dotnet-format warnings again
* Address IDE0251 warnings
* Address a few disabled IDE0060 warnings
* Silence IDE0060 in .editorconfig
* Revert "Simplify properties and array initialization, Use const when possible, Remove trailing commas"
  This reverts commit 9462e4136c0a2100dc28b20cf9542e06790aa67e.
* dotnet format whitespace after rebase
* First dotnet format pass
* Remove unnecessary formatting exclusion
* Add unsafe dotnet format changes
* Change visibility of JitSupportDarwin to internal
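Since the message covers silencing IDE0060 (unused parameter) both inline and via .editorconfig, the sketch below shows what those two suppression forms typically look like in a C# codebase. It is not taken from this commit; the class, method, and parameter names are hypothetical, and the stated reason for keeping the parameter is only an assumed example.

    // Minimal sketch of the two suppression styles described in the commit
    // message; names are hypothetical, not from the Ryujinx sources.
    //
    // Repository-wide form, placed in .editorconfig (shown here as a comment):
    //   dotnet_diagnostic.IDE0060.severity = none   # IDE0060: remove unused parameter

    using System;

    internal static class SuppressionSketch
    {
    #pragma warning disable IDE0060 // Remove unused parameter - kept to preserve the expected signature (assumed reason)
        public static void Emit(string name, int unusedFlags)
    #pragma warning restore IDE0060
        {
            Console.WriteLine(name);
        }

        private static void Main() => Emit("example", 0);
    }

The .editorconfig entry silences the rule project-wide, while the pragma keeps an explanatory comment next to the affected code, which matches the "Add comments to disabled warnings" step above.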
Diffstat (limited to 'src/ARMeilleure/Instructions/InstEmitSimdCvt.cs')
-rw-r--r--    src/ARMeilleure/Instructions/InstEmitSimdCvt.cs    149
1 file changed, 74 insertions(+), 75 deletions(-)
diff --git a/src/ARMeilleure/Instructions/InstEmitSimdCvt.cs b/src/ARMeilleure/Instructions/InstEmitSimdCvt.cs
index 652ad397..3363a7c7 100644
--- a/src/ARMeilleure/Instructions/InstEmitSimdCvt.cs
+++ b/src/ARMeilleure/Instructions/InstEmitSimdCvt.cs
@@ -5,7 +5,6 @@ using ARMeilleure.Translation;
using System;
using System.Diagnostics;
using System.Reflection;
-
using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
@@ -67,8 +66,8 @@ namespace ARMeilleure.Instructions
Operand n = GetVec(op.Rn);
Operand res = context.AddIntrinsic(Intrinsic.X86Vcvtps2ph, n, Const(X86GetRoundControl(FPRoundingMode.ToNearest)));
- res = context.AddIntrinsic(Intrinsic.X86Pslldq, res, Const(14)); // VectorZeroUpper112()
- res = context.AddIntrinsic(Intrinsic.X86Psrldq, res, Const(14));
+ res = context.AddIntrinsic(Intrinsic.X86Pslldq, res, Const(14)); // VectorZeroUpper112()
+ res = context.AddIntrinsic(Intrinsic.X86Psrldq, res, Const(14));
context.Copy(GetVec(op.Rd), res);
}
@@ -92,7 +91,7 @@ namespace ARMeilleure.Instructions
Debug.Assert(!Optimizations.ForceLegacySse);
Operand res = context.AddIntrinsic(Intrinsic.X86Vcvtph2ps, GetVec(op.Rn));
- res = context.VectorZeroUpper96(res);
+ res = context.VectorZeroUpper96(res);
context.Copy(GetVec(op.Rd), res);
}
@@ -116,7 +115,7 @@ namespace ARMeilleure.Instructions
Operand n = GetVec(op.Rn);
Operand res = context.AddIntrinsic(Intrinsic.X86Cvtsd2ss, context.VectorZero(), n);
- res = context.AddIntrinsic(Intrinsic.X86Vcvtps2ph, res, Const(X86GetRoundControl(FPRoundingMode.ToNearest)));
+ res = context.AddIntrinsic(Intrinsic.X86Vcvtps2ph, res, Const(X86GetRoundControl(FPRoundingMode.ToNearest)));
context.Copy(GetVec(op.Rd), res);
}
@@ -140,8 +139,8 @@ namespace ARMeilleure.Instructions
Operand n = GetVec(op.Rn);
Operand res = context.AddIntrinsic(Intrinsic.X86Vcvtph2ps, GetVec(op.Rn));
- res = context.AddIntrinsic(Intrinsic.X86Cvtss2sd, context.VectorZero(), res);
- res = context.VectorZeroUpper64(res);
+ res = context.AddIntrinsic(Intrinsic.X86Cvtss2sd, context.VectorZero(), res);
+ res = context.VectorZeroUpper64(res);
context.Copy(GetVec(op.Rd), res);
}
@@ -273,7 +272,7 @@ namespace ARMeilleure.Instructions
Operand n = GetVec(op.Rn);
Operand res = op.RegisterSize == RegisterSize.Simd128 ? context.AddIntrinsic(Intrinsic.X86Movhlps, n, n) : n;
- res = context.AddIntrinsic(Intrinsic.X86Cvtps2pd, res);
+ res = context.AddIntrinsic(Intrinsic.X86Cvtps2pd, res);
context.Copy(GetVec(op.Rd), res);
}
@@ -284,7 +283,7 @@ namespace ARMeilleure.Instructions
Operand n = GetVec(op.Rn);
Operand res = op.RegisterSize == RegisterSize.Simd128 ? context.AddIntrinsic(Intrinsic.X86Movhlps, n, n) : n;
- res = context.AddIntrinsic(Intrinsic.X86Vcvtph2ps, res);
+ res = context.AddIntrinsic(Intrinsic.X86Vcvtph2ps, res);
context.Copy(GetVec(op.Rd), res);
}
@@ -387,10 +386,10 @@ namespace ARMeilleure.Instructions
Intrinsic movInst = op.RegisterSize == RegisterSize.Simd128 ? Intrinsic.X86Movlhps : Intrinsic.X86Movhlps;
Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtpd2ps, GetVec(op.Rn));
- nInt = context.AddIntrinsic(Intrinsic.X86Movlhps, nInt, nInt);
+ nInt = context.AddIntrinsic(Intrinsic.X86Movlhps, nInt, nInt);
Operand res = context.VectorZeroUpper64(d);
- res = context.AddIntrinsic(movInst, res, nInt);
+ res = context.AddIntrinsic(movInst, res, nInt);
context.Copy(d, res);
}
@@ -404,10 +403,10 @@ namespace ARMeilleure.Instructions
Intrinsic movInst = op.RegisterSize == RegisterSize.Simd128 ? Intrinsic.X86Movlhps : Intrinsic.X86Movhlps;
Operand nInt = context.AddIntrinsic(Intrinsic.X86Vcvtps2ph, n, Const(X86GetRoundControl(FPRoundingMode.ToNearest)));
- nInt = context.AddIntrinsic(Intrinsic.X86Movlhps, nInt, nInt);
+ nInt = context.AddIntrinsic(Intrinsic.X86Movlhps, nInt, nInt);
Operand res = context.VectorZeroUpper64(d);
- res = context.AddIntrinsic(movInst, res, nInt);
+ res = context.AddIntrinsic(movInst, res, nInt);
context.Copy(d, res);
}
@@ -1225,15 +1224,15 @@ namespace ARMeilleure.Instructions
{
Debug.Assert(opF.Type == OperandType.V128);
- Operand longL = context.AddIntrinsicLong (Intrinsic.X86Cvtsd2si, opF); // opFL
- Operand res = context.VectorCreateScalar(longL);
+ Operand longL = context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, opF); // opFL
+ Operand res = context.VectorCreateScalar(longL);
if (!scalar)
{
- Operand opFH = context.AddIntrinsic (Intrinsic.X86Movhlps, res, opF); // res doesn't matter.
- Operand longH = context.AddIntrinsicLong (Intrinsic.X86Cvtsd2si, opFH);
- Operand resH = context.VectorCreateScalar(longH);
- res = context.AddIntrinsic (Intrinsic.X86Movlhps, res, resH);
+ Operand opFH = context.AddIntrinsic(Intrinsic.X86Movhlps, res, opF); // res doesn't matter.
+ Operand longH = context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, opFH);
+ Operand resH = context.VectorCreateScalar(longH);
+ res = context.AddIntrinsic(Intrinsic.X86Movlhps, res, resH);
}
return res;
@@ -1244,14 +1243,14 @@ namespace ARMeilleure.Instructions
Debug.Assert(op.Type == OperandType.V128);
Operand longL = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, op); // opL
- Operand res = context.AddIntrinsic (Intrinsic.X86Cvtsi2sd, context.VectorZero(), longL);
+ Operand res = context.AddIntrinsic(Intrinsic.X86Cvtsi2sd, context.VectorZero(), longL);
if (!scalar)
{
- Operand opH = context.AddIntrinsic (Intrinsic.X86Movhlps, res, op); // res doesn't matter.
+ Operand opH = context.AddIntrinsic(Intrinsic.X86Movhlps, res, op); // res doesn't matter.
Operand longH = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, opH);
- Operand resH = context.AddIntrinsic (Intrinsic.X86Cvtsi2sd, res, longH); // res doesn't matter.
- res = context.AddIntrinsic (Intrinsic.X86Movlhps, res, resH);
+ Operand resH = context.AddIntrinsic(Intrinsic.X86Cvtsi2sd, res, longH); // res doesn't matter.
+ res = context.AddIntrinsic(Intrinsic.X86Movlhps, res, resH);
}
return res;
@@ -1278,7 +1277,7 @@ namespace ARMeilleure.Instructions
int fpScaled = 0x3F800000 - fBits * 0x800000;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
res = context.AddIntrinsic(Intrinsic.X86Mulps, res, fpScaledMask);
@@ -1307,7 +1306,7 @@ namespace ARMeilleure.Instructions
long fpScaled = 0x3FF0000000000000L - fBits * 0x10000000000000L;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
res = context.AddIntrinsic(Intrinsic.X86Mulpd, res, fpScaledMask);
@@ -1334,16 +1333,16 @@ namespace ARMeilleure.Instructions
if (sizeF == 0)
{
Operand mask = scalar // 65536.000f (1 << 16)
- ? X86GetScalar (context, 0x47800000)
+ ? X86GetScalar(context, 0x47800000)
: X86GetAllElements(context, 0x47800000);
Operand res = context.AddIntrinsic(Intrinsic.X86Psrld, n, Const(16));
- res = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res);
- res = context.AddIntrinsic(Intrinsic.X86Mulps, res, mask);
+ res = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res);
+ res = context.AddIntrinsic(Intrinsic.X86Mulps, res, mask);
Operand res2 = context.AddIntrinsic(Intrinsic.X86Pslld, n, Const(16));
- res2 = context.AddIntrinsic(Intrinsic.X86Psrld, res2, Const(16));
- res2 = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res2);
+ res2 = context.AddIntrinsic(Intrinsic.X86Psrld, res2, Const(16));
+ res2 = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res2);
res = context.AddIntrinsic(Intrinsic.X86Addps, res, res2);
@@ -1355,7 +1354,7 @@ namespace ARMeilleure.Instructions
int fpScaled = 0x3F800000 - fBits * 0x800000;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
res = context.AddIntrinsic(Intrinsic.X86Mulps, res, fpScaledMask);
@@ -1375,16 +1374,16 @@ namespace ARMeilleure.Instructions
else /* if (sizeF == 1) */
{
Operand mask = scalar // 4294967296.0000000d (1L << 32)
- ? X86GetScalar (context, 0x41F0000000000000L)
+ ? X86GetScalar(context, 0x41F0000000000000L)
: X86GetAllElements(context, 0x41F0000000000000L);
- Operand res = context.AddIntrinsic (Intrinsic.X86Psrlq, n, Const(32));
- res = EmitSse2CvtInt64ToDoubleOp(context, res, scalar);
- res = context.AddIntrinsic (Intrinsic.X86Mulpd, res, mask);
+ Operand res = context.AddIntrinsic(Intrinsic.X86Psrlq, n, Const(32));
+ res = EmitSse2CvtInt64ToDoubleOp(context, res, scalar);
+ res = context.AddIntrinsic(Intrinsic.X86Mulpd, res, mask);
- Operand res2 = context.AddIntrinsic (Intrinsic.X86Psllq, n, Const(32));
- res2 = context.AddIntrinsic (Intrinsic.X86Psrlq, res2, Const(32));
- res2 = EmitSse2CvtInt64ToDoubleOp(context, res2, scalar);
+ Operand res2 = context.AddIntrinsic(Intrinsic.X86Psllq, n, Const(32));
+ res2 = context.AddIntrinsic(Intrinsic.X86Psrlq, res2, Const(32));
+ res2 = EmitSse2CvtInt64ToDoubleOp(context, res2, scalar);
res = context.AddIntrinsic(Intrinsic.X86Addpd, res, res2);
@@ -1396,7 +1395,7 @@ namespace ARMeilleure.Instructions
long fpScaled = 0x3FF0000000000000L - fBits * 0x10000000000000L;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
res = context.AddIntrinsic(Intrinsic.X86Mulpd, res, fpScaledMask);
@@ -1423,7 +1422,7 @@ namespace ARMeilleure.Instructions
if (sizeF == 0)
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (op is OpCodeSimdShImm fixedOp)
{
@@ -1433,7 +1432,7 @@ namespace ARMeilleure.Instructions
int fpScaled = 0x3F800000 + fBits * 0x800000;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
nRes = context.AddIntrinsic(Intrinsic.X86Mulps, nRes, fpScaledMask);
@@ -1451,7 +1450,7 @@ namespace ARMeilleure.Instructions
Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);
Operand fpMaxValMask = scalar // 2.14748365E9f (2147483648)
- ? X86GetScalar (context, 0x4F000000)
+ ? X86GetScalar(context, 0x4F000000)
: X86GetAllElements(context, 0x4F000000);
nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
@@ -1472,7 +1471,7 @@ namespace ARMeilleure.Instructions
else /* if (sizeF == 1) */
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (op is OpCodeSimdShImm fixedOp)
{
@@ -1482,7 +1481,7 @@ namespace ARMeilleure.Instructions
long fpScaled = 0x3FF0000000000000L + fBits * 0x10000000000000L;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
nRes = context.AddIntrinsic(Intrinsic.X86Mulpd, nRes, fpScaledMask);
@@ -1500,7 +1499,7 @@ namespace ARMeilleure.Instructions
Operand nLong = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);
Operand fpMaxValMask = scalar // 9.2233720368547760E18d (9223372036854775808)
- ? X86GetScalar (context, 0x43E0000000000000L)
+ ? X86GetScalar(context, 0x43E0000000000000L)
: X86GetAllElements(context, 0x43E0000000000000L);
nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
@@ -1528,7 +1527,7 @@ namespace ARMeilleure.Instructions
if (sizeF == 0)
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (op is OpCodeSimdShImm fixedOp)
{
@@ -1538,7 +1537,7 @@ namespace ARMeilleure.Instructions
int fpScaled = 0x3F800000 + fBits * 0x800000;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
nRes = context.AddIntrinsic(Intrinsic.X86Mulps, nRes, fpScaledMask);
@@ -1556,10 +1555,10 @@ namespace ARMeilleure.Instructions
Operand zero = context.VectorZero();
Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
Operand fpMaxValMask = scalar // 2.14748365E9f (2147483648)
- ? X86GetScalar (context, 0x4F000000)
+ ? X86GetScalar(context, 0x4F000000)
: X86GetAllElements(context, 0x4F000000);
Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);
@@ -1567,14 +1566,14 @@ namespace ARMeilleure.Instructions
nRes = context.AddIntrinsic(Intrinsic.X86Subps, nRes, fpMaxValMask);
nCmp = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
Operand nInt2 = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);
nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
- Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nInt2, nRes);
- dRes = context.AddIntrinsic(Intrinsic.X86Paddd, dRes, nInt);
+ Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nInt2, nRes);
+ dRes = context.AddIntrinsic(Intrinsic.X86Paddd, dRes, nInt);
if (scalar)
{
@@ -1590,7 +1589,7 @@ namespace ARMeilleure.Instructions
else /* if (sizeF == 1) */
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (op is OpCodeSimdShImm fixedOp)
{
@@ -1600,7 +1599,7 @@ namespace ARMeilleure.Instructions
long fpScaled = 0x3FF0000000000000L + fBits * 0x10000000000000L;
Operand fpScaledMask = scalar
- ? X86GetScalar (context, fpScaled)
+ ? X86GetScalar(context, fpScaled)
: X86GetAllElements(context, fpScaled);
nRes = context.AddIntrinsic(Intrinsic.X86Mulpd, nRes, fpScaledMask);
@@ -1618,10 +1617,10 @@ namespace ARMeilleure.Instructions
Operand zero = context.VectorZero();
Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
Operand fpMaxValMask = scalar // 9.2233720368547760E18d (9223372036854775808)
- ? X86GetScalar (context, 0x43E0000000000000L)
+ ? X86GetScalar(context, 0x43E0000000000000L)
: X86GetAllElements(context, 0x43E0000000000000L);
Operand nLong = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);
@@ -1629,14 +1628,14 @@ namespace ARMeilleure.Instructions
nRes = context.AddIntrinsic(Intrinsic.X86Subpd, nRes, fpMaxValMask);
nCmp = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
Operand nLong2 = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);
nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
- Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nLong2, nRes);
- dRes = context.AddIntrinsic(Intrinsic.X86Paddq, dRes, nLong);
+ Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nLong2, nRes);
+ dRes = context.AddIntrinsic(Intrinsic.X86Paddq, dRes, nLong);
if (scalar)
{
@@ -1656,7 +1655,7 @@ namespace ARMeilleure.Instructions
if (op.Size == 0)
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (isFixed)
{
@@ -1678,7 +1677,7 @@ namespace ARMeilleure.Instructions
}
Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
- ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
+ ? context.AddIntrinsicInt(Intrinsic.X86Cvtss2si, nRes)
: context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);
int fpMaxVal = op.RegisterSize == RegisterSize.Int32
@@ -1703,7 +1702,7 @@ namespace ARMeilleure.Instructions
else /* if (op.Size == 1) */
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (isFixed)
{
@@ -1725,7 +1724,7 @@ namespace ARMeilleure.Instructions
}
Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
- ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
+ ? context.AddIntrinsicInt(Intrinsic.X86Cvtsd2si, nRes)
: context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);
long fpMaxVal = op.RegisterSize == RegisterSize.Int32
@@ -1758,7 +1757,7 @@ namespace ARMeilleure.Instructions
if (op.Size == 0)
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (isFixed)
{
@@ -1782,7 +1781,7 @@ namespace ARMeilleure.Instructions
Operand zero = context.VectorZero();
Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
int fpMaxVal = op.RegisterSize == RegisterSize.Int32
? 0x4F000000 // 2.14748365E9f (2147483648)
@@ -1791,16 +1790,16 @@ namespace ARMeilleure.Instructions
Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
- ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
+ ? context.AddIntrinsicInt(Intrinsic.X86Cvtss2si, nRes)
: context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);
nRes = context.AddIntrinsic(Intrinsic.X86Subss, nRes, fpMaxValMask);
nCmp = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
Operand nIntOrLong2 = op.RegisterSize == RegisterSize.Int32
- ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
+ ? context.AddIntrinsicInt(Intrinsic.X86Cvtss2si, nRes)
: context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);
nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
@@ -1813,14 +1812,14 @@ namespace ARMeilleure.Instructions
}
Operand dRes = context.BitwiseExclusiveOr(nIntOrLong2, nInt);
- dRes = context.Add(dRes, nIntOrLong);
+ dRes = context.Add(dRes, nIntOrLong);
SetIntOrZR(context, op.Rd, dRes);
}
else /* if (op.Size == 1) */
{
Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, n, n, Const((int)CmpCondition.OrderedQ));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
if (isFixed)
{
@@ -1844,7 +1843,7 @@ namespace ARMeilleure.Instructions
Operand zero = context.VectorZero();
Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
long fpMaxVal = op.RegisterSize == RegisterSize.Int32
? 0x41E0000000000000L // 2147483648.0000000d (2147483648)
@@ -1853,16 +1852,16 @@ namespace ARMeilleure.Instructions
Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
- ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
+ ? context.AddIntrinsicInt(Intrinsic.X86Cvtsd2si, nRes)
: context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);
nRes = context.AddIntrinsic(Intrinsic.X86Subsd, nRes, fpMaxValMask);
nCmp = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
- nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
+ nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
Operand nIntOrLong2 = op.RegisterSize == RegisterSize.Int32
- ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
+ ? context.AddIntrinsicInt(Intrinsic.X86Cvtsd2si, nRes)
: context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);
nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
@@ -1875,7 +1874,7 @@ namespace ARMeilleure.Instructions
}
Operand dRes = context.BitwiseExclusiveOr(nIntOrLong2, nLong);
- dRes = context.Add(dRes, nIntOrLong);
+ dRes = context.Add(dRes, nIntOrLong);
SetIntOrZR(context, op.Rd, dRes);
}