diff --git a/src/coreclr/src/jit/codegen.h b/src/coreclr/src/jit/codegen.h index 5d1c4b372902ebe6db7bc5a864cff4b1316d4416..e2bcdf75fb3c50a604ee5cb8e6377e009d1643cb 100644 --- a/src/coreclr/src/jit/codegen.h +++ b/src/coreclr/src/jit/codegen.h @@ -984,7 +984,7 @@ protected: #ifdef FEATURE_HW_INTRINSICS void genHWIntrinsic(GenTreeHWIntrinsic* node); #if defined(_TARGET_XARCH_) - void genHWIntrinsic_R_RM(GenTreeHWIntrinsic* node, instruction ins, emitAttr attr); + void genHWIntrinsic_R_RM(GenTreeHWIntrinsic* node, instruction ins, emitAttr attr, regNumber reg, GenTree* rmOp); void genHWIntrinsic_R_RM_I(GenTreeHWIntrinsic* node, instruction ins, int8_t ival); void genHWIntrinsic_R_R_RM(GenTreeHWIntrinsic* node, instruction ins, emitAttr attr); void genHWIntrinsic_R_R_RM( diff --git a/src/coreclr/src/jit/gentree.h b/src/coreclr/src/jit/gentree.h index e35f50dd618d868ad380f6eca75765991a9cd80d..76958f4b29f31066d3ee97f1d65257eff23a935c 100644 --- a/src/coreclr/src/jit/gentree.h +++ b/src/coreclr/src/jit/gentree.h @@ -5922,8 +5922,8 @@ struct GenCondition C = Unsigned | S, // = 14 NC = Unsigned | NS, // = 15 - FEQ = Float | EQ, // = 16 - FNE = Float | NE, // = 17 + FEQ = Float | 0, // = 16 + FNE = Float | 1, // = 17 FLT = Float | SLT, // = 18 FLE = Float | SLE, // = 19 FGE = Float | SGE, // = 20 diff --git a/src/coreclr/src/jit/hwintrinsiccodegenxarch.cpp b/src/coreclr/src/jit/hwintrinsiccodegenxarch.cpp index 4f81a83b109b9d3e385d5fb726e9128c077c38e8..ee94fe0e91b6f44065fee3b3df01b17098ecfa9f 100644 --- a/src/coreclr/src/jit/hwintrinsiccodegenxarch.cpp +++ b/src/coreclr/src/jit/hwintrinsiccodegenxarch.cpp @@ -142,7 +142,7 @@ void CodeGen::genHWIntrinsic(GenTreeHWIntrinsic* node) } else { - genHWIntrinsic_R_RM(node, ins, simdSize); + genHWIntrinsic_R_RM(node, ins, simdSize, targetReg, op1); } } break; @@ -252,6 +252,10 @@ void CodeGen::genHWIntrinsic(GenTreeHWIntrinsic* node) genHWIntrinsicJumpTableFallback(intrinsicId, op2Reg, baseReg, offsReg, emitSwCase); } } + else if 
(node->TypeGet() == TYP_VOID) + { + genHWIntrinsic_R_RM(node, ins, EA_ATTR(node->gtSIMDSize), op1Reg, op2); + } else { genHWIntrinsic_R_R_RM(node, ins, EA_ATTR(node->gtSIMDSize)); @@ -405,77 +409,57 @@ void CodeGen::genHWIntrinsic(GenTreeHWIntrinsic* node) } //------------------------------------------------------------------------ -// genHWIntrinsic_R_RM: Generates the code for a hardware intrinsic node that takes a -// register/memory operand and that returns a value in register +// genHWIntrinsic_R_RM: Generates code for a hardware intrinsic node that takes a +// register operand and a register/memory operand. // // Arguments: // node - The hardware intrinsic node // ins - The instruction being generated // attr - The emit attribute for the instruciton being generated +// reg - The register +// rmOp - The register/memory operand node // -void CodeGen::genHWIntrinsic_R_RM(GenTreeHWIntrinsic* node, instruction ins, emitAttr attr) +void CodeGen::genHWIntrinsic_R_RM( + GenTreeHWIntrinsic* node, instruction ins, emitAttr attr, regNumber reg, GenTree* rmOp) { - var_types targetType = node->TypeGet(); - regNumber targetReg = node->gtRegNum; - GenTree* op1 = node->gtGetOp1(); - GenTree* op2 = node->gtGetOp2(); - emitter* emit = GetEmitter(); - - if (op2 != nullptr) - { - // The CompareScalarOrdered* and CompareScalarUnordered* intrinsics come down this - // code path. They are all MultiIns, as the return value comes from the flags and - // we have two operands instead. 
- - assert(HWIntrinsicInfo::GeneratesMultipleIns(node->gtHWIntrinsicId)); - assert(targetReg != REG_NA); - - targetReg = op1->gtRegNum; - op1 = op2; - op2 = nullptr; - } - else - { - assert(!node->OperIsCommutative()); - } + emitter* emit = GetEmitter(); - assert(targetReg != REG_NA); - assert(op2 == nullptr); + assert(reg != REG_NA); - if (op1->isContained() || op1->isUsedFromSpillTemp()) + if (rmOp->isContained() || rmOp->isUsedFromSpillTemp()) { assert(HWIntrinsicInfo::SupportsContainment(node->gtHWIntrinsicId)); - assertIsContainableHWIntrinsicOp(compiler->m_pLowering, node, op1); + assertIsContainableHWIntrinsicOp(compiler->m_pLowering, node, rmOp); TempDsc* tmpDsc = nullptr; unsigned varNum = BAD_VAR_NUM; unsigned offset = (unsigned)-1; - if (op1->isUsedFromSpillTemp()) + if (rmOp->isUsedFromSpillTemp()) { - assert(op1->IsRegOptional()); + assert(rmOp->IsRegOptional()); - tmpDsc = getSpillTempDsc(op1); + tmpDsc = getSpillTempDsc(rmOp); varNum = tmpDsc->tdTempNum(); offset = 0; regSet.tmpRlsTemp(tmpDsc); } - else if (op1->isIndir() || op1->OperIsHWIntrinsic()) + else if (rmOp->isIndir() || rmOp->OperIsHWIntrinsic()) { GenTree* addr; GenTreeIndir* memIndir = nullptr; - if (op1->isIndir()) + if (rmOp->isIndir()) { - memIndir = op1->AsIndir(); + memIndir = rmOp->AsIndir(); addr = memIndir->Addr(); } else { - assert(op1->AsHWIntrinsic()->OperIsMemoryLoad()); - assert(HWIntrinsicInfo::lookupNumArgs(op1->AsHWIntrinsic()) == 1); - addr = op1->gtGetOp1(); + assert(rmOp->AsHWIntrinsic()->OperIsMemoryLoad()); + assert(HWIntrinsicInfo::lookupNumArgs(rmOp->AsHWIntrinsic()) == 1); + addr = rmOp->gtGetOp1(); } switch (addr->OperGet()) @@ -489,7 +473,7 @@ void CodeGen::genHWIntrinsic_R_RM(GenTreeHWIntrinsic* node, instruction ins, emi case GT_CLS_VAR_ADDR: { - emit->emitIns_R_C(ins, attr, targetReg, addr->gtClsVar.gtClsVarHnd, 0); + emit->emitIns_R_C(ins, attr, reg, addr->AsClsVar()->gtClsVarHnd, 0); return; } @@ -500,31 +484,31 @@ void 
CodeGen::genHWIntrinsic_R_RM(GenTreeHWIntrinsic* node, instruction ins, emi // This is the HW intrinsic load case. // Until we improve the handling of addressing modes in the emitter, we'll create a // temporary GT_IND to generate code with. - GenTreeIndir load = indirForm(op1->TypeGet(), addr); + GenTreeIndir load = indirForm(rmOp->TypeGet(), addr); memIndir = &load; } - emit->emitIns_R_A(ins, attr, targetReg, memIndir); + emit->emitIns_R_A(ins, attr, reg, memIndir); return; } } } else { - switch (op1->OperGet()) + switch (rmOp->OperGet()) { case GT_LCL_FLD: { - GenTreeLclFld* lclField = op1->AsLclFld(); + GenTreeLclFld* lclField = rmOp->AsLclFld(); varNum = lclField->GetLclNum(); - offset = lclField->gtLclFld.gtLclOffs; + offset = lclField->gtLclOffs; break; } case GT_LCL_VAR: { - assert(op1->IsRegOptional() || !compiler->lvaTable[op1->gtLclVar.gtLclNum].lvIsRegCandidate()); - varNum = op1->AsLclVar()->GetLclNum(); + assert(rmOp->IsRegOptional() || !compiler->lvaGetDesc(rmOp->AsLclVar())->lvIsRegCandidate()); + varNum = rmOp->AsLclVar()->GetLclNum(); offset = 0; break; } @@ -543,12 +527,11 @@ void CodeGen::genHWIntrinsic_R_RM(GenTreeHWIntrinsic* node, instruction ins, emi assert((varNum != BAD_VAR_NUM) || (tmpDsc != nullptr)); assert(offset != (unsigned)-1); - emit->emitIns_R_S(ins, attr, targetReg, varNum, offset); + emit->emitIns_R_S(ins, attr, reg, varNum, offset); } else { - regNumber op1Reg = op1->gtRegNum; - emit->emitIns_R_R(ins, attr, targetReg, op1Reg); + emit->emitIns_R_R(ins, attr, reg, rmOp->GetRegNum()); } } @@ -1298,7 +1281,7 @@ void CodeGen::genBaseIntrinsic(GenTreeHWIntrinsic* node) { if (varTypeIsIntegral(baseType)) { - genHWIntrinsic_R_RM(node, ins, emitActualTypeSize(baseType)); + genHWIntrinsic_R_RM(node, ins, emitActualTypeSize(baseType), targetReg, op1); } else { @@ -1308,7 +1291,7 @@ void CodeGen::genBaseIntrinsic(GenTreeHWIntrinsic* node) if (op1->isContained() || op1->isUsedFromSpillTemp()) { - genHWIntrinsic_R_RM(node, ins, attr); + 
genHWIntrinsic_R_RM(node, ins, attr, targetReg, op1); } else if (targetReg != op1Reg) { @@ -1328,7 +1311,7 @@ void CodeGen::genBaseIntrinsic(GenTreeHWIntrinsic* node) if (op1->isContained() || op1->isUsedFromSpillTemp()) { - genHWIntrinsic_R_RM(node, ins, attr); + genHWIntrinsic_R_RM(node, ins, attr, targetReg, op1); } else if (targetReg != op1Reg) { @@ -1348,7 +1331,7 @@ void CodeGen::genBaseIntrinsic(GenTreeHWIntrinsic* node) if (op1->isContained() || op1->isUsedFromSpillTemp()) { - genHWIntrinsic_R_RM(node, ins, attr); + genHWIntrinsic_R_RM(node, ins, attr, targetReg, op1); } else { @@ -1363,7 +1346,7 @@ void CodeGen::genBaseIntrinsic(GenTreeHWIntrinsic* node) { if (op1->isContained() || op1->isUsedFromSpillTemp()) { - genHWIntrinsic_R_RM(node, ins, attr); + genHWIntrinsic_R_RM(node, ins, attr, targetReg, op1); } else if (targetReg != op1Reg) { @@ -1418,90 +1401,6 @@ void CodeGen::genSSEIntrinsic(GenTreeHWIntrinsic* node) switch (intrinsicId) { - case NI_SSE_CompareScalarOrderedEqual: - case NI_SSE_CompareScalarUnorderedEqual: - { - assert(baseType == TYP_FLOAT); - regNumber tmpReg = node->GetSingleTempReg(); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType); - - // Ensure we aren't overwriting targetReg - assert(tmpReg != targetReg); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setnp, EA_1BYTE, targetReg); - emit->emitIns_R(INS_sete, EA_1BYTE, tmpReg); - emit->emitIns_R_R(INS_and, EA_1BYTE, tmpReg, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, tmpReg); - break; - } - - case NI_SSE_CompareScalarOrderedGreaterThan: - case NI_SSE_CompareScalarUnorderedGreaterThan: - { - assert(baseType == TYP_FLOAT); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_seta, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case 
NI_SSE_CompareScalarOrderedGreaterThanOrEqual: - case NI_SSE_CompareScalarUnorderedGreaterThanOrEqual: - { - assert(baseType == TYP_FLOAT); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setae, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE_CompareScalarOrderedLessThan: - case NI_SSE_CompareScalarUnorderedLessThan: - { - assert(baseType == TYP_FLOAT); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_seta, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE_CompareScalarOrderedLessThanOrEqual: - case NI_SSE_CompareScalarUnorderedLessThanOrEqual: - { - assert(baseType == TYP_FLOAT); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setae, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE_CompareScalarOrderedNotEqual: - case NI_SSE_CompareScalarUnorderedNotEqual: - { - assert(baseType == TYP_FLOAT); - regNumber tmpReg = node->GetSingleTempReg(); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType); - - // Ensure we aren't overwriting targetReg - assert(tmpReg != targetReg); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setp, EA_1BYTE, targetReg); - emit->emitIns_R(INS_setne, EA_1BYTE, tmpReg); - emit->emitIns_R_R(INS_or, EA_1BYTE, tmpReg, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, tmpReg); - break; - } - case NI_SSE_X64_ConvertToInt64: case NI_SSE_X64_ConvertToInt64WithTruncation: { @@ -1509,7 +1408,7 @@ void 
CodeGen::genSSEIntrinsic(GenTreeHWIntrinsic* node) assert(op1 != nullptr); assert(op2 == nullptr); instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - genHWIntrinsic_R_RM(node, ins, EA_8BYTE); + genHWIntrinsic_R_RM(node, ins, EA_8BYTE, targetReg, op1); break; } @@ -1597,90 +1496,6 @@ void CodeGen::genSSE2Intrinsic(GenTreeHWIntrinsic* node) break; } - case NI_SSE2_CompareScalarOrderedEqual: - case NI_SSE2_CompareScalarUnorderedEqual: - { - assert(baseType == TYP_DOUBLE); - regNumber tmpReg = node->GetSingleTempReg(); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - - // Ensure we aren't overwriting targetReg - assert(tmpReg != targetReg); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setnp, EA_1BYTE, targetReg); - emit->emitIns_R(INS_sete, EA_1BYTE, tmpReg); - emit->emitIns_R_R(INS_and, EA_1BYTE, tmpReg, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, tmpReg); - break; - } - - case NI_SSE2_CompareScalarOrderedGreaterThan: - case NI_SSE2_CompareScalarUnorderedGreaterThan: - { - assert(baseType == TYP_DOUBLE); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_seta, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE2_CompareScalarOrderedGreaterThanOrEqual: - case NI_SSE2_CompareScalarUnorderedGreaterThanOrEqual: - { - assert(baseType == TYP_DOUBLE); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setae, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE2_CompareScalarOrderedLessThan: - case NI_SSE2_CompareScalarUnorderedLessThan: - { - assert(baseType == TYP_DOUBLE); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - - 
genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_seta, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE2_CompareScalarOrderedLessThanOrEqual: - case NI_SSE2_CompareScalarUnorderedLessThanOrEqual: - { - assert(baseType == TYP_DOUBLE); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setae, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE2_CompareScalarOrderedNotEqual: - case NI_SSE2_CompareScalarUnorderedNotEqual: - { - assert(baseType == TYP_DOUBLE); - instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - regNumber tmpReg = node->GetSingleTempReg(); - - // Ensure we aren't overwriting targetReg - assert(tmpReg != targetReg); - - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setp, EA_1BYTE, targetReg); - emit->emitIns_R(INS_setne, EA_1BYTE, tmpReg); - emit->emitIns_R_R(INS_or, EA_1BYTE, tmpReg, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, tmpReg); - break; - } - case NI_SSE2_X64_ConvertScalarToVector128Double: { assert(baseType == TYP_LONG); @@ -1698,7 +1513,7 @@ void CodeGen::genSSE2Intrinsic(GenTreeHWIntrinsic* node) assert(op1 != nullptr); assert(op2 == nullptr); instruction ins = HWIntrinsicInfo::lookupIns(intrinsicId, baseType); - genHWIntrinsic_R_RM(node, ins, emitTypeSize(baseType)); + genHWIntrinsic_R_RM(node, ins, emitTypeSize(baseType), targetReg, op1); break; } @@ -1721,7 +1536,7 @@ void CodeGen::genSSE2Intrinsic(GenTreeHWIntrinsic* node) else { assert(baseType == TYP_DOUBLE || baseType == TYP_FLOAT); - genHWIntrinsic_R_RM(node, ins, emitTypeSize(targetType)); + genHWIntrinsic_R_RM(node, ins, emitTypeSize(targetType), targetReg, op1); } break; } @@ -1807,38 +1622,11 @@ void CodeGen::genSSE41Intrinsic(GenTreeHWIntrinsic* 
node) } else { - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD16)); + genHWIntrinsic_R_RM(node, ins, EA_16BYTE, targetReg, op1); } break; } - case NI_SSE41_TestZ: - { - assert(HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType) == INS_ptest); - genHWIntrinsic_R_RM(node, INS_ptest, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_sete, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE41_TestC: - { - assert(HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType) == INS_ptest); - genHWIntrinsic_R_RM(node, INS_ptest, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_setb, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_SSE41_TestNotZAndNotC: - { - assert(HWIntrinsicInfo::lookupIns(intrinsicId, node->gtSIMDBaseType) == INS_ptest); - genHWIntrinsic_R_RM(node, INS_ptest, emitTypeSize(TYP_SIMD16)); - emit->emitIns_R(INS_seta, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - case NI_SSE41_Extract: case NI_SSE41_X64_Extract: { @@ -1923,21 +1711,16 @@ void CodeGen::genSSE42Intrinsic(GenTreeHWIntrinsic* node) emit->emitIns_R_R(INS_mov, emitTypeSize(targetType), targetReg, op1Reg); } - // This makes the genHWIntrinsic_R_RM code much simpler, as we don't need an - // overload that explicitly takes the operands. 
- node->gtOp1 = op2; - node->gtOp2 = nullptr; - if ((baseType == TYP_UBYTE) || (baseType == TYP_USHORT)) // baseType is the type of the second argument { assert(targetType == TYP_INT); - genHWIntrinsic_R_RM(node, INS_crc32, emitTypeSize(baseType)); + genHWIntrinsic_R_RM(node, INS_crc32, emitTypeSize(baseType), targetReg, op2); } else { assert(op1->TypeGet() == op2->TypeGet()); assert((targetType == TYP_INT) || (targetType == TYP_LONG)); - genHWIntrinsic_R_RM(node, INS_crc32, emitTypeSize(targetType)); + genHWIntrinsic_R_RM(node, INS_crc32, emitTypeSize(targetType), targetReg, op2); } break; @@ -2004,7 +1787,7 @@ void CodeGen::genAvxOrAvx2Intrinsic(GenTreeHWIntrinsic* node) } else { - genHWIntrinsic_R_RM(node, ins, emitTypeSize(TYP_SIMD32)); + genHWIntrinsic_R_RM(node, ins, EA_32BYTE, targetReg, op1); } break; } @@ -2116,30 +1899,6 @@ void CodeGen::genAvxOrAvx2Intrinsic(GenTreeHWIntrinsic* node) break; } - case NI_AVX_TestC: - { - genHWIntrinsic_R_RM(node, ins, attr); - emit->emitIns_R(INS_setb, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_AVX_TestNotZAndNotC: - { - genHWIntrinsic_R_RM(node, ins, attr); - emit->emitIns_R(INS_seta, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - - case NI_AVX_TestZ: - { - genHWIntrinsic_R_RM(node, ins, attr); - emit->emitIns_R(INS_sete, EA_1BYTE, targetReg); - emit->emitIns_R_R(INS_movzx, EA_1BYTE, targetReg, targetReg); - break; - } - default: unreached(); break; @@ -2208,7 +1967,7 @@ void CodeGen::genBMI1OrBMI2Intrinsic(GenTreeHWIntrinsic* node) { assert(op2 == nullptr); assert((targetType == TYP_INT) || (targetType == TYP_LONG)); - genHWIntrinsic_R_RM(node, ins, emitTypeSize(node->TypeGet())); + genHWIntrinsic_R_RM(node, ins, emitTypeSize(node->TypeGet()), targetReg, op1); break; } @@ -2467,7 +2226,7 @@ void CodeGen::genXCNTIntrinsic(GenTreeHWIntrinsic* node, instruction ins) { GetEmitter()->emitIns_R_R(INS_xor, 
EA_4BYTE, targetReg, targetReg); } - genHWIntrinsic_R_RM(node, ins, emitTypeSize(node->TypeGet())); + genHWIntrinsic_R_RM(node, ins, emitTypeSize(node->TypeGet()), targetReg, op1); } #endif // FEATURE_HW_INTRINSICS diff --git a/src/coreclr/src/jit/hwintrinsiclistxarch.h b/src/coreclr/src/jit/hwintrinsiclistxarch.h index c00a92e21cdab2c6dbfa9f23d93c31b5c9e6241f..ec9de09b58ccd8baca80cfc2de04a761227eaf51 100644 --- a/src/coreclr/src/jit/hwintrinsiclistxarch.h +++ b/src/coreclr/src/jit/hwintrinsiclistxarch.h @@ -81,29 +81,29 @@ HARDWARE_INTRINSIC(SSE_AddScalar, "AddScalar", HARDWARE_INTRINSIC(SSE_And, "And", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_andps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_Commutative) HARDWARE_INTRINSIC(SSE_AndNot, "AndNot", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_andnps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) HARDWARE_INTRINSIC(SSE_CompareEqual, "CompareEqual", SSE, 0, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_Commutative) -HARDWARE_INTRINSIC(SSE_CompareScalarOrderedEqual, "CompareScalarOrderedEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarOrderedEqual, "CompareScalarOrderedEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareScalarEqual, 
"CompareScalarEqual", SSE, 0, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedEqual, "CompareScalarUnorderedEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedEqual, "CompareScalarUnorderedEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareGreaterThan, "CompareGreaterThan", SSE, 6, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE_CompareScalarOrderedGreaterThan, "CompareScalarOrderedGreaterThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarOrderedGreaterThan, "CompareScalarOrderedGreaterThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareScalarGreaterThan, "CompareScalarGreaterThan", SSE, 6, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpss, 
INS_invalid}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedGreaterThan, "CompareScalarUnorderedGreaterThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedGreaterThan, "CompareScalarUnorderedGreaterThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareGreaterThanOrEqual, "CompareGreaterThanOrEqual", SSE, 5, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE_CompareScalarOrderedGreaterThanOrEqual, "CompareScalarOrderedGreaterThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarOrderedGreaterThanOrEqual, "CompareScalarOrderedGreaterThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareScalarGreaterThanOrEqual, "CompareScalarGreaterThanOrEqual", SSE, 5, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) 
-HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedGreaterThanOrEqual, "CompareScalarUnorderedGreaterThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedGreaterThanOrEqual, "CompareScalarUnorderedGreaterThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareLessThan, "CompareLessThan", SSE, 1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE_CompareScalarOrderedLessThan, "CompareScalarOrderedLessThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarOrderedLessThan, "CompareScalarOrderedLessThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareScalarLessThan, "CompareScalarLessThan", SSE, 1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedLessThan, "CompareScalarUnorderedLessThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, 
INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedLessThan, "CompareScalarUnorderedLessThan", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareLessThanOrEqual, "CompareLessThanOrEqual", SSE, 2, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE_CompareScalarOrderedLessThanOrEqual, "CompareScalarOrderedLessThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarOrderedLessThanOrEqual, "CompareScalarOrderedLessThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareScalarLessThanOrEqual, "CompareScalarLessThanOrEqual", SSE, 2, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedLessThanOrEqual, "CompareScalarUnorderedLessThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, 
HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedLessThanOrEqual, "CompareScalarUnorderedLessThanOrEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareNotEqual, "CompareNotEqual", SSE, 4, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_Commutative) -HARDWARE_INTRINSIC(SSE_CompareScalarOrderedNotEqual, "CompareScalarOrderedNotEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarOrderedNotEqual, "CompareScalarOrderedNotEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareScalarNotEqual, "CompareScalarNotEqual", SSE, 4, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedNotEqual, "CompareScalarUnorderedNotEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_CompareScalarUnorderedNotEqual, 
"CompareScalarUnorderedNotEqual", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE_CompareNotGreaterThan, "CompareNotGreaterThan", SSE, 2, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) HARDWARE_INTRINSIC(SSE_CompareScalarNotGreaterThan, "CompareScalarNotGreaterThan", SSE, 2, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) HARDWARE_INTRINSIC(SSE_CompareNotGreaterThanOrEqual, "CompareNotGreaterThanOrEqual", SSE, 1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpps, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) @@ -182,25 +182,25 @@ HARDWARE_INTRINSIC(SSE2_And, "And", HARDWARE_INTRINSIC(SSE2_AndNot, "AndNot", SSE2, -1, 16, 2, {INS_pandn, INS_pandn, INS_pandn, INS_pandn, INS_pandn, INS_pandn, INS_pandn, INS_pandn, INS_invalid, INS_andnpd}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) HARDWARE_INTRINSIC(SSE2_Average, "Average", SSE2, -1, 16, 2, {INS_invalid, INS_pavgb, INS_invalid, INS_pavgw, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_Commutative) HARDWARE_INTRINSIC(SSE2_CompareEqual, "CompareEqual", SSE2, 0, 16, 2, {INS_pcmpeqb, INS_pcmpeqb, INS_pcmpeqw, INS_pcmpeqw, INS_pcmpeqd, INS_pcmpeqd, INS_invalid, INS_invalid, INS_invalid, INS_cmppd}, HW_Category_SimpleSIMD, HW_Flag_Commutative) -HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedEqual, "CompareScalarOrderedEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, 
INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedEqual, "CompareScalarOrderedEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareScalarEqual, "CompareScalarEqual", SSE2, 0, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedEqual, "CompareScalarUnorderedEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedEqual, "CompareScalarUnorderedEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareGreaterThan, "CompareGreaterThan", SSE2, 6, 16, 2, {INS_pcmpgtb, INS_invalid, INS_pcmpgtw, INS_invalid, INS_pcmpgtd, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmppd}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedGreaterThan, "CompareScalarOrderedGreaterThan", SSE2, -1, 16, 2, {INS_pcmpgtb, INS_invalid, INS_pcmpgtw, INS_invalid, INS_pcmpgtd, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, 
HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) -HARDWARE_INTRINSIC(SSE2_CompareScalarGreaterThan, "CompareScalarGreaterThan", SSE2, 6, 16, 2, {INS_pcmpgtb, INS_invalid, INS_pcmpgtw, INS_invalid, INS_pcmpgtd, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedGreaterThan, "CompareScalarUnorderedGreaterThan", SSE2, -1, 16, 2, {INS_pcmpgtb, INS_invalid, INS_pcmpgtw, INS_invalid, INS_pcmpgtd, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedGreaterThan, "CompareScalarOrderedGreaterThan", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarGreaterThan, "CompareScalarGreaterThan", SSE2, 6, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) +HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedGreaterThan, "CompareScalarUnorderedGreaterThan", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareGreaterThanOrEqual, "CompareGreaterThanOrEqual", SSE2, 5, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmppd}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedGreaterThanOrEqual, "CompareScalarOrderedGreaterThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, 
INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedGreaterThanOrEqual, "CompareScalarOrderedGreaterThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareScalarGreaterThanOrEqual, "CompareScalarGreaterThanOrEqual", SSE2, 5, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedGreaterThanOrEqual, "CompareScalarUnorderedGreaterThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedGreaterThanOrEqual, "CompareScalarUnorderedGreaterThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareLessThan, "CompareLessThan", SSE2, 1, 16, 2, {INS_pcmpgtb, INS_invalid, INS_pcmpgtw, INS_invalid, INS_pcmpgtd, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmppd}, HW_Category_Special, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedLessThan, "CompareScalarOrderedLessThan", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, 
HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedLessThan, "CompareScalarOrderedLessThan", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareScalarLessThan, "CompareScalarLessThan", SSE2, 1, 16, 2, {INS_pcmpgtb, INS_invalid, INS_pcmpgtw, INS_invalid, INS_pcmpgtd, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedLessThan, "CompareScalarUnorderedLessThan", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedLessThan, "CompareScalarUnorderedLessThan", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareLessThanOrEqual, "CompareLessThanOrEqual", SSE2, 2, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmppd}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedLessThanOrEqual, "CompareScalarOrderedLessThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedLessThanOrEqual, 
"CompareScalarOrderedLessThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareScalarLessThanOrEqual, "CompareScalarLessThanOrEqual", SSE2, 2, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmpsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedLessThanOrEqual, "CompareScalarUnorderedLessThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_CompareScalarUnorderedLessThanOrEqual, "CompareScalarUnorderedLessThanOrEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareNotEqual, "CompareNotEqual", SSE2, 4, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_cmppd}, HW_Category_SimpleSIMD, HW_Flag_Commutative) HARDWARE_INTRINSIC(SSE2_CompareScalarOrderedNotEqual, "CompareScalarOrderedNotEqual", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_Commutative|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg|HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE2_CompareScalarNotEqual, "CompareScalarNotEqual", SSE2, 4, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, 
INS_invalid, INS_invalid, INS_invalid, INS_cmpsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) @@ -358,9 +358,9 @@ HARDWARE_INTRINSIC(SSE41_RoundToPositiveInfinity, "RoundToPosi HARDWARE_INTRINSIC(SSE41_RoundToPositiveInfinityScalar, "RoundToPositiveInfinityScalar", SSE41, 10, 16, -1, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_roundss, INS_roundsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) HARDWARE_INTRINSIC(SSE41_RoundToZero, "RoundToZero", SSE41, 11, 16, 1, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_roundps, INS_roundpd}, HW_Category_SimpleSIMD, HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(SSE41_RoundToZeroScalar, "RoundToZeroScalar", SSE41, 11, 16, -1, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_roundss, INS_roundsd}, HW_Category_SIMDScalar, HW_Flag_CopyUpperBits) -HARDWARE_INTRINSIC(SSE41_TestC, "TestC", SSE41, -1, 16, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg) -HARDWARE_INTRINSIC(SSE41_TestNotZAndNotC, "TestNotZAndNotC", SSE41, -1, 16, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg) -HARDWARE_INTRINSIC(SSE41_TestZ, "TestZ", SSE41, -1, 16, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg) +HARDWARE_INTRINSIC(SSE41_TestC, "TestC", SSE41, -1, 16, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_BaseTypeFromFirstArg) 
+HARDWARE_INTRINSIC(SSE41_TestNotZAndNotC, "TestNotZAndNotC", SSE41, -1, 16, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_BaseTypeFromFirstArg) +HARDWARE_INTRINSIC(SSE41_TestZ, "TestZ", SSE41, -1, 16, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_BaseTypeFromFirstArg) // *************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************** // Intrinsic ID Function name ISA ival SIMD size NumArg instructions Category Flags @@ -444,9 +444,9 @@ HARDWARE_INTRINSIC(AVX_Store, "Store", HARDWARE_INTRINSIC(AVX_StoreAligned, "StoreAligned", AVX, -1, 32, 2, {INS_movdqa, INS_movdqa, INS_movdqa, INS_movdqa, INS_movdqa, INS_movdqa, INS_movdqa, INS_movdqa, INS_movaps, INS_movapd}, HW_Category_MemoryStore, HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(AVX_StoreAlignedNonTemporal, "StoreAlignedNonTemporal", AVX, -1, 32, 2, {INS_movntdq, INS_movntdq, INS_movntdq, INS_movntdq, INS_movntdq, INS_movntdq, INS_movntdq, INS_movntdq, INS_movntps, INS_movntpd}, HW_Category_MemoryStore, HW_Flag_NoRMWSemantics) HARDWARE_INTRINSIC(AVX_Subtract, "Subtract", AVX, -1, 32, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_subps, INS_subpd}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) -HARDWARE_INTRINSIC(AVX_TestC, "TestC", AVX, -1, 0, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_vtestps, INS_vtestpd}, HW_Category_SimpleSIMD, 
HW_Flag_UnfixedSIMDSize|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg) -HARDWARE_INTRINSIC(AVX_TestNotZAndNotC, "TestNotZAndNotC", AVX, -1, 0, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_vtestps, INS_vtestpd}, HW_Category_SimpleSIMD, HW_Flag_UnfixedSIMDSize|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg) -HARDWARE_INTRINSIC(AVX_TestZ, "TestZ", AVX, -1, 0, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_vtestps, INS_vtestpd}, HW_Category_SimpleSIMD, HW_Flag_UnfixedSIMDSize|HW_Flag_MultiIns|HW_Flag_BaseTypeFromFirstArg) +HARDWARE_INTRINSIC(AVX_TestC, "TestC", AVX, -1, 0, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_vtestps, INS_vtestpd}, HW_Category_SimpleSIMD, HW_Flag_UnfixedSIMDSize|HW_Flag_BaseTypeFromFirstArg) +HARDWARE_INTRINSIC(AVX_TestNotZAndNotC, "TestNotZAndNotC", AVX, -1, 0, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_vtestps, INS_vtestpd}, HW_Category_SimpleSIMD, HW_Flag_UnfixedSIMDSize|HW_Flag_BaseTypeFromFirstArg) +HARDWARE_INTRINSIC(AVX_TestZ, "TestZ", AVX, -1, 0, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_vtestps, INS_vtestpd}, HW_Category_SimpleSIMD, HW_Flag_UnfixedSIMDSize|HW_Flag_BaseTypeFromFirstArg) HARDWARE_INTRINSIC(AVX_UnpackHigh, "UnpackHigh", AVX, -1, 32, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_unpckhps, INS_unpckhpd}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) HARDWARE_INTRINSIC(AVX_UnpackLow, "UnpackLow", AVX, -1, 32, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_unpcklps, INS_unpcklpd}, HW_Category_SimpleSIMD, HW_Flag_NoFlag) HARDWARE_INTRINSIC(AVX_Xor, "Xor", AVX, -1, 32, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, 
INS_invalid, INS_invalid, INS_xorps, INS_xorpd}, HW_Category_SimpleSIMD, HW_Flag_Commutative) @@ -629,6 +629,14 @@ HARDWARE_INTRINSIC(POPCNT_PopCount, "PopCount", // POPCNT Intrinsics HARDWARE_INTRINSIC(POPCNT_X64_PopCount, "PopCount", POPCNT_X64, -1, 0, 1, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_popcnt, INS_invalid, INS_invalid}, HW_Category_Scalar, HW_Flag_NoFloatingPointUsed|HW_Flag_NoRMWSemantics|HW_Flag_MultiIns) +// Special intrinsics that are generated during lowering +HARDWARE_INTRINSIC(SSE_COMISS, "COMISS", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE_UCOMISS, "UCOMISS", SSE, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomiss, INS_invalid}, HW_Category_SIMDScalar, HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_COMISD, "COMISD", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_comisd}, HW_Category_SIMDScalar, HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE2_UCOMISD, "UCOMISD", SSE2, -1, 16, 2, {INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_invalid, INS_ucomisd}, HW_Category_SIMDScalar, HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(SSE41_PTEST, "PTEST", SSE41, -1, 16, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_invalid, INS_invalid}, HW_Category_SimpleSIMD, HW_Flag_NoRMWSemantics) +HARDWARE_INTRINSIC(AVX_PTEST, "PTEST", AVX, -1, 0, 2, {INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_ptest, INS_vtestps, INS_vtestpd}, HW_Category_SimpleSIMD, HW_Flag_NoRMWSemantics) + #endif // FEATURE_HW_INTRINSIC #undef HARDWARE_INTRINSIC 
diff --git a/src/coreclr/src/jit/lower.cpp b/src/coreclr/src/jit/lower.cpp index 72f36fabf8c4452b57c423ebc7b3ac75d190d86c..65a2e56857d2bca9739ab45be6c151bd1c6ed5d8 100644 --- a/src/coreclr/src/jit/lower.cpp +++ b/src/coreclr/src/jit/lower.cpp @@ -2974,6 +2974,109 @@ GenTree* Lowering::LowerJTrue(GenTreeOp* jtrue) return nullptr; } +//---------------------------------------------------------------------------------------------- +// LowerNodeCC: Lowers a node that produces a boolean value by setting the condition flags. +// +// Arguments: +// node - The node to lower +// condition - The condition code of the generated SETCC/JCC node +// +// Return Value: +// A SETCC/JCC node or nullptr if `node` is not used. +// +// Notes: +// This simply replaces `node`'s use with an appropriate SETCC/JCC node, +// `node` is not actually changed, except by having its GTF_SET_FLAGS set. +// It's the caller's responsibility to change `node` such that it only +// sets the condition flags, without producing a boolean value. +// +GenTreeCC* Lowering::LowerNodeCC(GenTree* node, GenCondition condition) +{ + // Skip over a chain of EQ/NE(x, 0) relops. This may be present either + // because `node` is not a relop and so it cannot be used directly by a + // JTRUE, or because the frontend failed to remove an EQ/NE(x, 0) that's + // used as logical negation. + // + // Usually there's only one such relop but there's little difference + // between removing one or all so we may as well remove them all. + // + // We can't allow any other nodes between `node` and its user because we + // have no way of knowing if those nodes change flags or not. So we're looking + // to skip over a sequence of appropriately connected zero and EQ/NE nodes.
+ + // The x in EQ/NE(x, 0) + GenTree* relop = node; + // The first node of the relop sequence + GenTree* first = node->gtNext; + // The node following the relop sequence + GenTree* next = first; + + while ((next != nullptr) && next->IsIntegralConst(0) && (next->gtNext != nullptr) && + next->gtNext->OperIs(GT_EQ, GT_NE) && (next->gtNext->AsOp()->gtGetOp1() == relop) && + (next->gtNext->AsOp()->gtGetOp2() == next)) + { + relop = next->gtNext; + next = relop->gtNext; + + if (relop->OperIs(GT_EQ)) + { + condition = GenCondition::Reverse(condition); + } + } + + GenTreeCC* cc = nullptr; + + // Next may be null if `node` is not used. In that case we don't need to generate a SETCC node. + if (next != nullptr) + { + if (next->OperIs(GT_JTRUE)) + { + // If the instruction immediately following 'relop', i.e. 'next' is a conditional branch, + // it should always have 'relop' as its 'op1'. If it doesn't, then we have improperly + // constructed IL (the setting of a condition code should always immediately precede its + // use, since the JIT doesn't track dataflow for condition codes). Still, if it happens + // it's not our problem, it simply means that `node` is not used and can be removed. + if (next->AsUnOp()->gtGetOp1() == relop) + { + assert(relop->OperIsCompare()); + + next->ChangeOper(GT_JCC); + cc = next->AsCC(); + cc->gtCondition = condition; + } + } + else + { + // If the node is used by something other than a JTRUE then we need to insert a + // SETCC node to materialize the boolean value. + LIR::Use use; + + if (BlockRange().TryGetUse(relop, &use)) + { + cc = new (comp, GT_SETCC) GenTreeCC(GT_SETCC, condition, TYP_INT); + BlockRange().InsertAfter(node, cc); + use.ReplaceWith(comp, cc); + } + } + } + + if (cc != nullptr) + { + node->gtFlags |= GTF_SET_FLAGS; + cc->gtFlags |= GTF_USE_FLAGS; + } + + // Remove the chain of EQ/NE(x, 0) relop nodes, if any. 
Note that if a SETCC was + // inserted after `node`, `first` still points to the node that was initially + // after `node`. + if (relop != node) + { + BlockRange().Remove(first, relop); + } + + return cc; +} + // Lower "jmp " tail call to insert PInvoke method epilog if required. void Lowering::LowerJmpMethod(GenTree* jmp) { diff --git a/src/coreclr/src/jit/lower.h b/src/coreclr/src/jit/lower.h index 35fbe3db87c498030624a36bd9f70d56313b98c8..4d90a6b16ba6567def6ef41bdf49e58ebaf3bd41 100644 --- a/src/coreclr/src/jit/lower.h +++ b/src/coreclr/src/jit/lower.h @@ -130,6 +130,7 @@ private: GenTree* OptimizeConstCompare(GenTree* cmp); GenTree* LowerCompare(GenTree* cmp); GenTree* LowerJTrue(GenTreeOp* jtrue); + GenTreeCC* LowerNodeCC(GenTree* node, GenCondition condition); void LowerJmpMethod(GenTree* jmp); void LowerRet(GenTree* ret); GenTree* LowerDelegateInvoke(GenTreeCall* call); @@ -309,6 +310,7 @@ private: #endif // FEATURE_SIMD #ifdef FEATURE_HW_INTRINSICS void LowerHWIntrinsic(GenTreeHWIntrinsic* node); + void LowerHWIntrinsicCC(GenTreeHWIntrinsic* node, NamedIntrinsic newIntrinsicId, GenCondition condition); #endif // FEATURE_HW_INTRINSICS // Utility functions diff --git a/src/coreclr/src/jit/lowerxarch.cpp b/src/coreclr/src/jit/lowerxarch.cpp index d3a27c347fbd2ad28b3f66c41965e0511afba2bd..3b305275516d3a526655f44737844f5a9384b529 100644 --- a/src/coreclr/src/jit/lowerxarch.cpp +++ b/src/coreclr/src/jit/lowerxarch.cpp @@ -837,77 +837,101 @@ void Lowering::LowerSIMD(GenTreeSIMD* simdNode) } else if (simdNode->IsSIMDEqualityOrInequality()) { - LIR::Use simdUse; + LowerNodeCC(simdNode, + simdNode->gtSIMDIntrinsicID == SIMDIntrinsicOpEquality ? GenCondition::EQ : GenCondition::NE); - if (BlockRange().TryGetUse(simdNode, &simdUse)) - { - // - // Try to transform JTRUE(EQ|NE(SIMD(x, y), 0|1)) into - // JCC(SIMD(x, y)). SIMD(x, y) - // is expected to set the Zero flag appropriately. 
- // All the involved nodes must form a continuous range, there's no other way to - // guarantee that condition flags aren't changed between the SIMD node and the JCC - // node. - // + simdNode->gtType = TYP_VOID; + simdNode->ClearUnusedValue(); + } +#endif + ContainCheckSIMD(simdNode); +} +#endif // FEATURE_SIMD - bool transformed = false; - GenTree* simdUser = simdUse.User(); +#ifdef FEATURE_HW_INTRINSICS - if (simdUser->OperIs(GT_EQ, GT_NE) && simdUser->gtGetOp2()->IsCnsIntOrI() && - (simdNode->gtNext == simdUser->gtGetOp2()) && (simdUser->gtGetOp2()->gtNext == simdUser)) +//---------------------------------------------------------------------------------------------- +// LowerHWIntrinsicCC: Lowers a hardware intrinsic node that produces a boolean value by +// setting the condition flags. +// +// Arguments: +// node - The hardware intrinsic node +// newIntrinsicId - The intrinsic id of the lowered intrinsic node +// condition - The condition code of the generated SETCC/JCC node +// +void Lowering::LowerHWIntrinsicCC(GenTreeHWIntrinsic* node, NamedIntrinsic newIntrinsicId, GenCondition condition) +{ + GenTreeCC* cc = LowerNodeCC(node, condition); + + node->gtHWIntrinsicId = newIntrinsicId; + node->gtType = TYP_VOID; + node->ClearUnusedValue(); + + bool swapOperands = false; + bool canSwapOperands = false; + + switch (newIntrinsicId) + { + case NI_SSE_COMISS: + case NI_SSE_UCOMISS: + case NI_SSE2_COMISD: + case NI_SSE2_UCOMISD: + // In some cases we can generate better code if we swap the operands: + // - If the condition is not one of the "preferred" floating point conditions we can swap + // the operands and change the condition to avoid generating an extra JP/JNP branch. + // - If the first operand can be contained but the second cannot, we can swap operands in + // order to be able to contain the first operand and avoid the need for a temp reg. 
+ // We can't handle both situations at the same time and since an extra branch is likely to + // be worse than an extra temp reg (x64 has a reasonable number of XMM registers) we'll favor + // the branch case: + // - If the condition is not preferred then swap, even if doing this will later prevent + // containment. + // - Allow swapping for containment purposes only if this doesn't result in a non-"preferred" + // condition being generated. + if ((cc != nullptr) && cc->gtCondition.PreferSwap()) { - ssize_t relopOp2Value = simdUser->gtGetOp2()->AsIntCon()->IconValue(); - - if ((relopOp2Value == 0) || (relopOp2Value == 1)) - { - GenTree* jtrue = simdUser->gtNext; + swapOperands = true; + } + else + { + canSwapOperands = (cc == nullptr) || !GenCondition::Swap(cc->gtCondition).PreferSwap(); + } + break; - if ((jtrue != nullptr) && jtrue->OperIs(GT_JTRUE) && (jtrue->gtGetOp1() == simdUser)) - { - if ((simdNode->gtSIMDIntrinsicID == SIMDIntrinsicOpEquality) != simdUser->OperIs(GT_EQ)) - { - relopOp2Value ^= 1; - } + case NI_SSE41_PTEST: + case NI_AVX_PTEST: + // If we need the Carry flag then we can't swap operands. + canSwapOperands = (cc == nullptr) || cc->gtCondition.Is(GenCondition::EQ, GenCondition::NE); + break; - jtrue->ChangeOper(GT_JCC); - GenTreeCC* jcc = jtrue->AsCC(); - jcc->gtFlags |= GTF_USE_FLAGS; - jcc->gtCondition = (relopOp2Value == 0) ? GenCondition::NE : GenCondition::EQ; + default: + unreached(); + } - BlockRange().Remove(simdUser->gtGetOp2()); - BlockRange().Remove(simdUser); - transformed = true; - } - } - } + if (canSwapOperands) + { + bool op1SupportsRegOptional = false; + bool op2SupportsRegOptional = false; - if (!transformed) - { - // - // The code generated for SIMD SIMD(x, y) nodes sets - // the Zero flag like integer compares do so we can simply use SETCC - // to produce the desired result. This avoids the need for subsequent phases - // to have to handle 2 cases (set flags/set destination register). 
- // - - GenCondition condition = - (simdNode->gtSIMDIntrinsicID == SIMDIntrinsicOpEquality) ? GenCondition::EQ : GenCondition::NE; - GenTreeCC* setcc = new (comp, GT_SETCC) GenTreeCC(GT_SETCC, condition, simdNode->TypeGet()); - setcc->gtFlags |= GTF_USE_FLAGS; - BlockRange().InsertAfter(simdNode, setcc); - simdUse.ReplaceWith(comp, setcc); - } + if (!IsContainableHWIntrinsicOp(node, node->gtGetOp2(), &op2SupportsRegOptional) && + IsContainableHWIntrinsicOp(node, node->gtGetOp1(), &op1SupportsRegOptional)) + { + // Swap operands if op2 cannot be contained but op1 can. + swapOperands = true; } + } - simdNode->gtFlags |= GTF_SET_FLAGS; - simdNode->gtType = TYP_VOID; + if (swapOperands) + { + std::swap(node->gtOp1, node->gtOp2); + + if (cc != nullptr) + { + cc->gtCondition = GenCondition::Swap(cc->gtCondition); + } } -#endif - ContainCheckSIMD(simdNode); } -#endif // FEATURE_SIMD -#ifdef FEATURE_HW_INTRINSICS //---------------------------------------------------------------------------------------------- // Lowering::LowerHWIntrinsic: Perform containment analysis for a hardware intrinsic node. 
// @@ -916,6 +940,108 @@ void Lowering::LowerSIMD(GenTreeSIMD* simdNode) // void Lowering::LowerHWIntrinsic(GenTreeHWIntrinsic* node) { + switch (node->gtHWIntrinsicId) + { + case NI_SSE_CompareScalarOrderedEqual: + LowerHWIntrinsicCC(node, NI_SSE_COMISS, GenCondition::FEQ); + break; + case NI_SSE_CompareScalarOrderedNotEqual: + LowerHWIntrinsicCC(node, NI_SSE_COMISS, GenCondition::FNEU); + break; + case NI_SSE_CompareScalarOrderedLessThan: + LowerHWIntrinsicCC(node, NI_SSE_COMISS, GenCondition::FLT); + break; + case NI_SSE_CompareScalarOrderedLessThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE_COMISS, GenCondition::FLE); + break; + case NI_SSE_CompareScalarOrderedGreaterThan: + LowerHWIntrinsicCC(node, NI_SSE_COMISS, GenCondition::FGT); + break; + case NI_SSE_CompareScalarOrderedGreaterThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE_COMISS, GenCondition::FGE); + break; + + case NI_SSE_CompareScalarUnorderedEqual: + LowerHWIntrinsicCC(node, NI_SSE_UCOMISS, GenCondition::FEQ); + break; + case NI_SSE_CompareScalarUnorderedNotEqual: + LowerHWIntrinsicCC(node, NI_SSE_UCOMISS, GenCondition::FNEU); + break; + case NI_SSE_CompareScalarUnorderedLessThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE_UCOMISS, GenCondition::FLE); + break; + case NI_SSE_CompareScalarUnorderedLessThan: + LowerHWIntrinsicCC(node, NI_SSE_UCOMISS, GenCondition::FLT); + break; + case NI_SSE_CompareScalarUnorderedGreaterThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE_UCOMISS, GenCondition::FGE); + break; + case NI_SSE_CompareScalarUnorderedGreaterThan: + LowerHWIntrinsicCC(node, NI_SSE_UCOMISS, GenCondition::FGT); + break; + + case NI_SSE2_CompareScalarOrderedEqual: + LowerHWIntrinsicCC(node, NI_SSE2_COMISD, GenCondition::FEQ); + break; + case NI_SSE2_CompareScalarOrderedNotEqual: + LowerHWIntrinsicCC(node, NI_SSE2_COMISD, GenCondition::FNEU); + break; + case NI_SSE2_CompareScalarOrderedLessThan: + LowerHWIntrinsicCC(node, NI_SSE2_COMISD, GenCondition::FLT); + break; + case 
NI_SSE2_CompareScalarOrderedLessThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE2_COMISD, GenCondition::FLE); + break; + case NI_SSE2_CompareScalarOrderedGreaterThan: + LowerHWIntrinsicCC(node, NI_SSE2_COMISD, GenCondition::FGT); + break; + case NI_SSE2_CompareScalarOrderedGreaterThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE2_COMISD, GenCondition::FGE); + break; + + case NI_SSE2_CompareScalarUnorderedEqual: + LowerHWIntrinsicCC(node, NI_SSE2_UCOMISD, GenCondition::FEQ); + break; + case NI_SSE2_CompareScalarUnorderedNotEqual: + LowerHWIntrinsicCC(node, NI_SSE2_UCOMISD, GenCondition::FNEU); + break; + case NI_SSE2_CompareScalarUnorderedLessThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE2_UCOMISD, GenCondition::FLE); + break; + case NI_SSE2_CompareScalarUnorderedLessThan: + LowerHWIntrinsicCC(node, NI_SSE2_UCOMISD, GenCondition::FLT); + break; + case NI_SSE2_CompareScalarUnorderedGreaterThanOrEqual: + LowerHWIntrinsicCC(node, NI_SSE2_UCOMISD, GenCondition::FGE); + break; + case NI_SSE2_CompareScalarUnorderedGreaterThan: + LowerHWIntrinsicCC(node, NI_SSE2_UCOMISD, GenCondition::FGT); + break; + + case NI_SSE41_TestC: + LowerHWIntrinsicCC(node, NI_SSE41_PTEST, GenCondition::C); + break; + case NI_SSE41_TestZ: + LowerHWIntrinsicCC(node, NI_SSE41_PTEST, GenCondition::EQ); + break; + case NI_SSE41_TestNotZAndNotC: + LowerHWIntrinsicCC(node, NI_SSE41_PTEST, GenCondition::UGT); + break; + + case NI_AVX_TestC: + LowerHWIntrinsicCC(node, NI_AVX_PTEST, GenCondition::C); + break; + case NI_AVX_TestZ: + LowerHWIntrinsicCC(node, NI_AVX_PTEST, GenCondition::EQ); + break; + case NI_AVX_TestNotZAndNotC: + LowerHWIntrinsicCC(node, NI_AVX_PTEST, GenCondition::UGT); + break; + + default: + break; + } + ContainCheckHWIntrinsic(node); } #endif // FEATURE_HW_INTRINSICS @@ -2999,35 +3125,6 @@ void Lowering::ContainCheckHWIntrinsic(GenTreeHWIntrinsic* node) case HW_Category_SIMDScalar: case HW_Category_Scalar: { - if (HWIntrinsicInfo::GeneratesMultipleIns(intrinsicId)) - { - switch 
(intrinsicId) - { - case NI_SSE_CompareScalarOrderedLessThan: - case NI_SSE_CompareScalarUnorderedLessThan: - case NI_SSE_CompareScalarOrderedLessThanOrEqual: - case NI_SSE_CompareScalarUnorderedLessThanOrEqual: - case NI_SSE2_CompareScalarOrderedLessThan: - case NI_SSE2_CompareScalarUnorderedLessThan: - case NI_SSE2_CompareScalarOrderedLessThanOrEqual: - case NI_SSE2_CompareScalarUnorderedLessThanOrEqual: - { - // We need to swap the operands for CompareLessThanOrEqual - node->gtOp1 = op2; - node->gtOp2 = op1; - op2 = op1; - break; - } - - default: - { - // TODO-XArch-CQ: The CompareScalarOrdered* and CompareScalarUnordered* methods - // are commutative if you also inverse the intrinsic. - break; - } - } - } - bool supportsRegOptional = false; if (IsContainableHWIntrinsicOp(node, op2, &supportsRegOptional)) diff --git a/src/coreclr/src/jit/lsraxarch.cpp b/src/coreclr/src/jit/lsraxarch.cpp index d136eeb4f5904201d58ea23a2cdefbdac1e45884..9946d5859aeeb55b37a439b6e64196a24bdacfa7 100644 --- a/src/coreclr/src/jit/lsraxarch.cpp +++ b/src/coreclr/src/jit/lsraxarch.cpp @@ -2436,20 +2436,6 @@ int LinearScan::BuildHWIntrinsic(GenTreeHWIntrinsic* intrinsicTree) break; } - case NI_SSE_CompareScalarOrderedEqual: - case NI_SSE_CompareScalarUnorderedEqual: - case NI_SSE_CompareScalarOrderedNotEqual: - case NI_SSE_CompareScalarUnorderedNotEqual: - case NI_SSE2_CompareScalarOrderedEqual: - case NI_SSE2_CompareScalarUnorderedEqual: - case NI_SSE2_CompareScalarOrderedNotEqual: - case NI_SSE2_CompareScalarUnorderedNotEqual: - { - buildInternalIntRegisterDefForNode(intrinsicTree, allByteRegs()); - setInternalRegsDelayFree = true; - break; - } - case NI_SSE2_MaskMove: { assert(numArgs == 3); diff --git a/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073.cs b/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073.cs new file mode 100644 index 
0000000000000000000000000000000000000000..8ae3aa2385ebff4a7f3d4cd691cdf3aab8c8de83 --- /dev/null +++ b/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073.cs @@ -0,0 +1,2067 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Runtime.CompilerServices; +using System.Runtime.Intrinsics; +using System.Runtime.Intrinsics.X86; + +class Program +{ + [MethodImpl(MethodImplOptions.NoInlining)] static bool True() => true; + [MethodImpl(MethodImplOptions.NoInlining)] static bool False() => false; + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Check(bool expected, bool actual, [CallerLineNumber] int line = 0) + { + if (expected != actual) Console.WriteLine("Failed at line {0}", line); + return expected == actual; + } + + static int Main() + { + bool r = true; + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, 
Test_Sse_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(41.0f), 
Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= 
!Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(false, 
Test_Sse_CompareScalarOrderedNotEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedNotEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, 
Test_Sse_CompareScalarOrderedNotEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedNotEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarOrderedNotEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarOrderedNotEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedNotEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, 
Test_Sse_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, 
Test_Sse_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThan_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarOrderedLessThan_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThan_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThan_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, 
Test_Sse2_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThan_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(41.0f), 
Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(42.0), 
Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, 
Test_Sse_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || 
Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= 
!Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(42.0), 
Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThan_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || 
Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, 
Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, 
Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= 
!Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, 
Test_Sse_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, 
Test_Sse2_CompareScalarUnorderedEqual_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, 
Test_Sse_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, 
Test_Sse_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarUnorderedNotEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedNotEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThan_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported 
|| Check(true, Test_Sse_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThan_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThan_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || 
Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + 
r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThan_Branch_Swap(Vector128.Create(double.NaN), 
Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || 
Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, 
Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, 
Test_Sse_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + 
r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, 
Test_Sse2_CompareScalarUnorderedGreaterThan_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(42.0), 
Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThan_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= 
!Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, 
Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(41.0f), Vector128.Create(42.0f))); + r &= !Sse.IsSupported || Check(true, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(41.0f))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0f), Vector128.Create(float.NaN))); + r &= !Sse.IsSupported || Check(false, Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(float.NaN), Vector128.Create(float.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Normal(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || 
Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(true, 
Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(41.0), Vector128.Create(42.0))); + r &= !Sse2.IsSupported || Check(true, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(41.0))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(42.0), Vector128.Create(double.NaN))); + r &= !Sse2.IsSupported || Check(false, Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(Vector128.Create(double.NaN), Vector128.Create(double.NaN))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_Normal(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_Normal(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_Normal(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_Normal(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_LogicalNot(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_LogicalNot(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_LogicalNot(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_LogicalNot(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_Branch(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_Branch(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_Branch(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || 
Check(false, Test_Sse41_TestZ_Branch(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_LogicalNot_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestZ_LogicalNot_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_LogicalNot_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestZ_LogicalNot_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Normal(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Normal(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Branch(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(true, 
Test_Avx_TestZ_Branch(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Branch(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Branch(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Normal(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Normal(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot(Vector256.Create(3), 
Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Branch(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Branch(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Branch(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Branch(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Swap(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Swap(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Swap(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Swap(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Swap(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Swap(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot_Swap(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot_Swap(Vector256.Create(3), Vector256.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_Normal(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_Normal(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_Normal(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_Normal(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_LogicalNot(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_LogicalNot(Vector128.Create(1), Vector128.Create(2))); + r &= 
!Sse41.IsSupported || Check(true, Test_Sse41_TestC_LogicalNot(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_LogicalNot(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_Branch(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_Branch(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_Branch(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_Branch(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_LogicalNot_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_LogicalNot_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestC_LogicalNot_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestC_LogicalNot_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Normal(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Normal(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector128.Create(3), Vector128.Create(2))); + r &= 
!Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Branch(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Branch(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Normal(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, 
Test_Avx_TestC_Normal(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Branch(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Branch(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Swap(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Swap(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot_Swap(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot_Swap(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot_Swap(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot_Swap(Vector256.Create(3), Vector256.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Normal(Vector128.Create(0), 
Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Normal(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_Normal(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Normal(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_LogicalNot(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_LogicalNot(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_LogicalNot(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_LogicalNot(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Branch(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Branch(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_Branch(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Branch(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Sse41.IsSupported || Check(true, 
Test_Sse41_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Sse41.IsSupported || Check(false, Test_Sse41_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Sse41.IsSupported || Check(true, Test_Sse41_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_Normal(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_LogicalNot(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_Branch(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported 
|| Check(true, Test_Avx_TestNotZAndNotC_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(0), Vector128.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(1), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(2), Vector128.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector128.Create(3), Vector128.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_Normal(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_LogicalNot(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_Branch(Vector256.Create(2), Vector256.Create(3))); + r &= 
!Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_Swap(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector256.Create(0), Vector256.Create(0))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector256.Create(1), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector256.Create(2), Vector256.Create(3))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Swap(Vector256.Create(3), Vector256.Create(2))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Normal(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Branch(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Branch(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || 
Check(false, Test_Avx_TestZ_Branch(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Swap(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Swap(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Swap(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Branch_Swap(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Branch_Swap(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot_Branch_Swap(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Normal(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Normal(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Branch(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Branch(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Branch(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_Swap(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, 
Test_Avx_TestZ_Swap(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_Swap(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Branch_Swap(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestZ_LogicalNot_Branch_Swap(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestZ_LogicalNot_Branch_Swap(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Normal(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Branch(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Swap(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, 
Test_Avx_TestC_LogicalNot_Branch_Swap(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot_Branch_Swap(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot_Branch_Swap(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Normal(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Normal(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Branch(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Branch(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_Swap(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_Swap(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestC_LogicalNot_Branch_Swap(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestC_LogicalNot_Branch_Swap(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, 
Test_Avx_TestC_LogicalNot_Branch_Swap(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(Vector128.Create(1.0f), Vector128.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(Vector128.Create(1.0f), Vector128.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(Vector128.Create(-1.0f), Vector128.Create(-1.0f))); + r &= 
!Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Normal(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Branch(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(false, Test_Avx_TestNotZAndNotC_Swap(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(Vector256.Create(1.0f), Vector256.Create(1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(Vector256.Create(1.0f), Vector256.Create(-1.0f))); + r &= !Avx.IsSupported || Check(true, Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(Vector256.Create(-1.0f), Vector256.Create(-1.0f))); + return r ? 
100 : 42; + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarOrderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarOrderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedEqual(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedNotEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedNotEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarOrderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedNotEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedNotEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedNotEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedNotEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedNotEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedNotEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedNotEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedNotEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarOrderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedNotEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedNotEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedNotEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedNotEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedNotEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedNotEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarOrderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThan(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThan(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThan(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarOrderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThan(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarOrderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThanOrEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedLessThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedLessThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarOrderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThanOrEqual(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedLessThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedLessThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarOrderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThan(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThan(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThan(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarOrderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThan(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarOrderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThanOrEqual(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarOrderedGreaterThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarOrderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThanOrEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarOrderedGreaterThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarOrderedGreaterThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarUnorderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarUnorderedEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedEqual(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedNotEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedNotEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarUnorderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedNotEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedNotEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedNotEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedNotEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedNotEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedNotEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedNotEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedNotEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarUnorderedNotEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedNotEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedNotEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedNotEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedNotEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedNotEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedNotEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarUnorderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThan(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThan(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThan(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarUnorderedLessThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThan(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarUnorderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThanOrEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedLessThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarUnorderedLessThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThanOrEqual(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedLessThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedLessThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarUnorderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThan(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThan(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThan(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThan_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThan_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarUnorderedGreaterThan(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThan_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThan(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThan_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThan_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThan(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse.CompareScalarUnorderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThanOrEqual(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse.CompareScalarUnorderedGreaterThanOrEqual(x, Sse.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Normal(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse2.CompareScalarUnorderedGreaterThanOrEqual(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThanOrEqual(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse2_CompareScalarUnorderedGreaterThanOrEqual_Branch_Swap(in Vector128 x, in Vector128 y) + { + return Sse2.CompareScalarUnorderedGreaterThanOrEqual(x, Sse2.Or(y.AsSingle(), default).AsDouble()) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestZ_Normal(in Vector128 x, in Vector128 y) + { + return Sse41.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestZ_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse41.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestZ_Branch(in Vector128 x, in Vector128 y) + { + return Sse41.TestZ(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestZ_Swap(in Vector128 x, in Vector128 y) + { + return Sse41.TestZ(x, Sse41.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestZ_LogicalNot_Swap(in Vector128 x, in Vector128 y) + { + return !Sse41.TestZ(x, Sse41.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Normal(in Vector128 x, in Vector128 y) + { + return Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Branch(in Vector128 x, in Vector128 y) + { + return Avx.TestZ(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Swap(in Vector128 x, in Vector128 y) + { + return Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot_Swap(in Vector128 x, in Vector128 y) + { + return !Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Normal(in Vector256 x, in Vector256 y) + { + return Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot(in Vector256 x, in Vector256 y) + { + return !Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Branch(in Vector256 x, in Vector256 y) + { + return Avx.TestZ(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Swap(in Vector256 x, in Vector256 y) + { + return Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot_Swap(in Vector256 x, in Vector256 y) + { + return !Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestC_Normal(in Vector128 x, in Vector128 y) + { + return Sse41.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestC_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse41.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestC_Branch(in Vector128 x, in Vector128 y) + { + return Sse41.TestC(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestC_Swap(in Vector128 x, in Vector128 y) + { + return Sse41.TestC(x, Sse41.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestC_LogicalNot_Swap(in Vector128 x, in Vector128 y) + { + return !Sse41.TestC(x, Sse41.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Normal(in Vector128 x, in Vector128 y) + { + return Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Branch(in Vector128 x, in Vector128 y) + { + return Avx.TestC(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Swap(in Vector128 x, in Vector128 y) + { + return Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot_Swap(in Vector128 x, in Vector128 y) + { + return !Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Normal(in Vector256 x, in Vector256 y) + { + return Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot(in Vector256 x, in Vector256 y) + { + return !Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Branch(in Vector256 x, in Vector256 y) + { + return Avx.TestC(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Swap(in Vector256 x, in Vector256 y) + { + return Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot_Swap(in Vector256 x, in Vector256 y) + { + return !Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestNotZAndNotC_Normal(in Vector128 x, in Vector128 y) + { + return Sse41.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestNotZAndNotC_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Sse41.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestNotZAndNotC_Branch(in Vector128 x, in Vector128 y) + { + return Sse41.TestNotZAndNotC(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestNotZAndNotC_Swap(in Vector128 x, in Vector128 y) + { + return Sse41.TestNotZAndNotC(x, Sse41.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Sse41_TestNotZAndNotC_LogicalNot_Swap(in Vector128 x, in Vector128 y) + { + return !Sse41.TestNotZAndNotC(x, Sse41.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Normal(in Vector128 x, in Vector128 y) + { + return Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Branch(in Vector128 x, in Vector128 y) + { + return Avx.TestNotZAndNotC(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Swap(in Vector128 x, in Vector128 y) + { + return Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot_Swap(in Vector128 x, in Vector128 y) + { + return !Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Normal(in Vector256 x, in Vector256 y) + { + return Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot(in Vector256 x, in Vector256 y) + { + return !Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Branch(in Vector256 x, in Vector256 y) + { + return Avx.TestNotZAndNotC(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Swap(in Vector256 x, in Vector256 y) + { + return Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot_Swap(in Vector256 x, in Vector256 y) + { + return !Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsInt32()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Normal(in Vector128 x, in Vector128 y) + { + return Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Branch(in Vector128 x, in Vector128 y) + { + return Avx.TestZ(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Swap(in Vector128 x, in Vector128 y) + { + return Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot_Branch_Swap(in Vector128 x, in Vector128 y) + { + return !Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Normal(in Vector256 x, in Vector256 y) + { + return Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot(in Vector256 x, in Vector256 y) + { + return !Avx.TestZ(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Branch(in Vector256 x, in Vector256 y) + { + return Avx.TestZ(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_Swap(in Vector256 x, in Vector256 y) + { + return Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestZ_LogicalNot_Branch_Swap(in Vector256 x, in Vector256 y) + { + return !Avx.TestZ(x, Avx.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Normal(in Vector128 x, in Vector128 y) + { + return Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Branch(in Vector128 x, in Vector128 y) + { + return Avx.TestC(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Swap(in Vector128 x, in Vector128 y) + { + return Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot_Branch_Swap(in Vector128 x, in Vector128 y) + { + return !Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Normal(in Vector256 x, in Vector256 y) + { + return Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot(in Vector256 x, in Vector256 y) + { + return !Avx.TestC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Branch(in Vector256 x, in Vector256 y) + { + return Avx.TestC(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_Swap(in Vector256 x, in Vector256 y) + { + return Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestC_LogicalNot_Branch_Swap(in Vector256 x, in Vector256 y) + { + return !Avx.TestC(x, Avx.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Normal(in Vector128 x, in Vector128 y) + { + return Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot(in Vector128 x, in Vector128 y) + { + return !Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Branch(in Vector128 x, in Vector128 y) + { + return Avx.TestNotZAndNotC(x, y) ? 
True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Swap(in Vector128 x, in Vector128 y) + { + return Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(in Vector128 x, in Vector128 y) + { + return !Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsSingle()) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Normal(in Vector256 x, in Vector256 y) + { + return Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot(in Vector256 x, in Vector256 y) + { + return !Avx.TestNotZAndNotC(x, y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Branch(in Vector256 x, in Vector256 y) + { + return Avx.TestNotZAndNotC(x, y) ? True() : False(); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_Swap(in Vector256 x, in Vector256 y) + { + return Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsSingle()); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Test_Avx_TestNotZAndNotC_LogicalNot_Branch_Swap(in Vector256 x, in Vector256 y) + { + return !Avx.TestNotZAndNotC(x, Avx.Or(y.AsSingle(), default).AsSingle()) ? 
True() : False(); + } +} diff --git a/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073.csproj b/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073.csproj new file mode 100644 index 0000000000000000000000000000000000000000..f834ca1861ee5184881049fefbfb8f4f0fb0f3b2 --- /dev/null +++ b/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073.csproj @@ -0,0 +1,13 @@ + + + Exe + true + + + Embedded + True + + + + + diff --git a/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073_gen.csx b/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073_gen.csx new file mode 100644 index 0000000000000000000000000000000000000000..971ece3d27105fde7abb905c594fc8a2c3ec6e40 --- /dev/null +++ b/src/coreclr/tests/src/JIT/HardwareIntrinsics/X86/Regression/GitHub_17073/GitHub_17073_gen.csx @@ -0,0 +1,353 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +// This C# script can be executed using the csi +// tool found in Tools\net46\roslyn\tools. +// +// It produces a C# file (on stdout) containing tests for various +// COMISS/UCOMISS/PTEST/VTESTPS/VTESTPD based intrinsics. + +using System; +using System.Collections.Generic; +using System.IO; + +[Flags] +enum TestKind +{ + // Use the intrinsic as is + Normal = 0, + // Negate the intrinsic result + LogicalNot = 1, + // Use a branch to test the intrinsic result + Branch = 2, + // Try to cause the intrinsic operands to be + // swapped by placing the first operand in + // memory and the second in a register. 
+ Swap = 4 +} + +void GenerateCompareTests(List tests) +{ + var inputs = new (double x, double y)[] + { + (42.0, 42.0), + (41.0, 42.0), + (42.0, 41.0), + (42.0, double.NaN), + (double.NaN, double.NaN) + }; + + bool EQ(double x, double y) => x == y; + bool NE(double x, double y) => x != y; + bool LT(double x, double y) => x < y; + bool LE(double x, double y) => x <= y; + bool GT(double x, double y) => x > y; + bool GE(double x, double y) => x >= y; + + foreach (var intrinsic in new (string name, Func op, (double x, double y)[] inputs)[] + { + ("CompareScalarOrderedEqual", EQ, inputs), + ("CompareScalarOrderedNotEqual", NE, inputs), + ("CompareScalarOrderedLessThan", LT, inputs), + ("CompareScalarOrderedLessThanOrEqual", LE, inputs), + ("CompareScalarOrderedGreaterThan", GT, inputs), + ("CompareScalarOrderedGreaterThanOrEqual", GE, inputs), + + ("CompareScalarUnorderedEqual", EQ, inputs), + ("CompareScalarUnorderedNotEqual", NE, inputs), + ("CompareScalarUnorderedLessThan", LT, inputs), + ("CompareScalarUnorderedLessThanOrEqual", LE, inputs), + ("CompareScalarUnorderedGreaterThan", GT, inputs), + ("CompareScalarUnorderedGreaterThanOrEqual", GE, inputs) + }) + { + foreach ((string isa, int vectorSize, string vectorElementType) in new[] + { + ("Sse", 128, "Single"), + ("Sse2", 128, "Double") + }) + { + foreach (TestKind kind in new[] + { + TestKind.Normal, + TestKind.LogicalNot, + TestKind.Branch, + TestKind.Swap, + TestKind.Swap | TestKind.Branch + }) + { + tests.Add(new BinaryOpTest(isa, intrinsic.name, vectorSize, vectorElementType, kind, intrinsic.op, intrinsic.inputs)); + } + } + } +} + +void GeneratePackedIntTestTests(List tests) +{ + var inputs = new (int x, int y)[] + { + (0, 0), + (1, 2), + (2, 3), + (3, 2) + }; + + bool Z(int x, int y) => (x & y) == 0; + bool C(int x, int y) => (~x & y) == 0; + + foreach (var intrinsic in new (string name, Func op, (int x, int y)[] inputs)[] + { + ("TestZ", Z, inputs), + ("TestC", C, inputs), + ("TestNotZAndNotC", (x, y) => 
!Z(x, y) & !C(x, y), inputs) + }) + { + foreach ((string isa, int vectorSize, string vectorElementType) in new[] + { + ("Sse41", 128, "Int32"), + ("Avx", 128, "Int32"), + ("Avx", 256, "Int32") + }) + { + foreach (TestKind kind in new[] + { + TestKind.Normal, + TestKind.LogicalNot, + TestKind.Branch, + TestKind.Swap, + TestKind.Swap | TestKind.LogicalNot + }) + { + tests.Add(new BinaryOpTest(isa, intrinsic.name, vectorSize, vectorElementType, kind, intrinsic.op, intrinsic.inputs)); + } + } + } +} + +void GeneratePackedDoubleTestTests(List tests) +{ + var inputs = new (double x, double y)[] + { + (1.0, 1.0), + (1.0, -1.0), + (-1.0, -1.0) + }; + + bool S(double d) => d < 0.0; + bool Z(double x, double y) => (S(x) & S(y)) == false; + bool C(double x, double y) => (!S(x) & S(y)) == false; + + foreach (var intrinsic in new (string name, Func op, (double x, double y)[] inputs)[] + { + ("TestZ", Z, inputs), + ("TestC", C, inputs), + ("TestNotZAndNotC", (x, y) => !Z(x, y) && !C(x, y), inputs) + }) + { + foreach ((string isa, int vectorSize, string vectorElementType) in new[] + { + ("Avx", 128, "Single"), + ("Avx", 256, "Single") + }) + { + foreach (TestKind kind in new[] + { + TestKind.Normal, + TestKind.LogicalNot, + TestKind.Branch, + TestKind.Swap, + TestKind.Swap | TestKind.Branch | TestKind.LogicalNot + }) + { + tests.Add(new BinaryOpTest(isa, intrinsic.name, vectorSize, vectorElementType, kind, intrinsic.op, intrinsic.inputs)); + } + } + } +} + +static string CreateVector(int vectorSize, string vectorElementType, double value) +{ + if (vectorElementType == "Single") + return double.IsNaN(value) ? $"Vector{vectorSize}.Create(float.NaN)" : $"Vector{vectorSize}.Create({value:F1}f)"; + if (vectorElementType == "Double") + return double.IsNaN(value) ? 
$"Vector{vectorSize}.Create(double.NaN)" : $"Vector{vectorSize}.Create({value:F1})"; + throw new NotSupportedException(); +} + +static string CreateVector(int vectorSize, int value) +{ + return $"Vector{vectorSize}.Create({value})"; +} + +static string CreateVector(int vectorSize, string vectorElementType, T value) +{ + if (value is double d) + return CreateVector(vectorSize, vectorElementType, d); + if (value is int i) + return CreateVector(vectorSize, i); + throw new NotSupportedException(); +} + +abstract class Test +{ + public readonly string Isa; + public readonly string Intrinsic; + public readonly int VectorSize; + public readonly string VectorElementType; + public readonly string VectorType; + public readonly TestKind Kind; + + public Test(string isa, string intrinsic, int vectorSize, string vectorElementType, TestKind kind) + { + Isa = isa; + Intrinsic = intrinsic; + VectorSize = vectorSize; + VectorElementType = vectorElementType; + VectorType = $"Vector{VectorSize}<{VectorElementType}>"; + Kind = kind; + } + + public string Name => $"Test_{Isa}_{Intrinsic}_{Kind.ToString().Replace(',', '_').Replace(" ", "")}"; + public abstract void WriteTestMethod(TextWriter w); + public abstract void WriteTestCases(TextWriter w); +} + +class UnaryOpTest : Test +{ + Func op; + T[] inputs; + + public UnaryOpTest(string isa, string intrinsic, int vectorSize, string vectorElementType, TestKind kind, Func op, T[] inputs) + : base(isa, intrinsic, vectorSize, vectorElementType, kind) + { + this.op = op; + this.inputs = inputs; + } + + public override void WriteTestMethod(TextWriter w) + { + w.WriteLine(); + w.WriteLine(" [MethodImpl(MethodImplOptions.NoInlining)]"); + w.WriteLine($" static bool {Name}(in {VectorType} x)"); + w.WriteLine(" {"); + + w.Write(" return "); + + if (Kind.HasFlag(TestKind.LogicalNot)) + w.Write("!"); + + w.Write($"{Isa}.{Intrinsic}(x)"); + + if (Kind.HasFlag(TestKind.Branch)) + w.Write(" ? 
True() : False()"); + + w.WriteLine(";"); + w.WriteLine(" }"); + } + + string Check(T x) + { + return (Kind.HasFlag(TestKind.LogicalNot) ? !op(x) : op(x)).ToString().ToLowerInvariant(); + } + + public override void WriteTestCases(TextWriter w) + { + foreach (var input in inputs) + w.WriteLine($" r &= !{Isa}.IsSupported || Check({Check(input)}, {Name}({CreateVector(VectorSize, VectorElementType, input)}));"); + } +} + +class BinaryOpTest : Test +{ + Func op; + (T x, T y)[] inputs; + + public BinaryOpTest(string isa, string intrinsic, int vectorSize, string vectorElementType, TestKind kind, Func op, (T x, T y)[] inputs) + : base(isa, intrinsic, vectorSize, vectorElementType, kind) + { + this.op = op; + this.inputs = inputs; + } + + public override void WriteTestMethod(TextWriter w) + { + w.WriteLine(); + w.WriteLine(" [MethodImpl(MethodImplOptions.NoInlining)]"); + // Pass parameters by reference so we get consistency accross various ABIs. + // We get operands in memory and by adding an extra "nop" intrinsic we can + // force one of the operands in a register, just enough to catch some cases + // of containment. + w.WriteLine($" static bool {Name}(in {VectorType} x, in {VectorType} y)"); + w.WriteLine(" {"); + w.Write(" return "); + + if (Kind.HasFlag(TestKind.LogicalNot)) + w.Write("!"); + + if (Kind.HasFlag(TestKind.Swap)) + w.Write($"{Isa}.{Intrinsic}(x, {Isa}.Or(y.AsSingle(), default).As{VectorElementType}())"); + else + w.Write($"{Isa}.{Intrinsic}(x, y)"); + + if (Kind.HasFlag(TestKind.Branch)) + w.Write(" ? True() : False()"); + + w.WriteLine(";"); + w.WriteLine(" }"); + } + + string Check((T x, T y) input) + { + return (Kind.HasFlag(TestKind.LogicalNot) ? 
!op(input.x, input.y) : op(input.x, input.y)).ToString().ToLowerInvariant(); + } + + public override void WriteTestCases(TextWriter w) + { + foreach (var input in inputs) + w.WriteLine($" r &= !{Isa}.IsSupported || Check({Check(input)}, {Name}({CreateVector(VectorSize, VectorElementType, input.x)}, {CreateVector(VectorSize, VectorElementType, input.y)}));"); + } +} + +var tests = new List(); +GenerateCompareTests(tests); +GeneratePackedIntTestTests(tests); +GeneratePackedDoubleTestTests(tests); + +var w = Console.Out; +w.WriteLine(@"// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Runtime.CompilerServices; +using System.Runtime.Intrinsics; +using System.Runtime.Intrinsics.X86; + +class Program +{ + [MethodImpl(MethodImplOptions.NoInlining)] static bool True() => true; + [MethodImpl(MethodImplOptions.NoInlining)] static bool False() => false; + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool Check(bool expected, bool actual, [CallerLineNumber] int line = 0) + { + if (expected != actual) Console.WriteLine(""Failed at line {0}"", line); + return expected == actual; + } +"); + +w.WriteLine(" static int Main()"); +w.WriteLine(" {"); +w.WriteLine(" bool r = true;"); + +foreach (var test in tests) + test.WriteTestCases(w); + +w.WriteLine(" return r ? 100 : 42;"); +w.WriteLine(" }"); + +foreach (var test in tests) + test.WriteTestMethod(w); + +w.WriteLine("}");