Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/coreclr/jit/codegenriscv64.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -5040,6 +5040,7 @@ void CodeGen::genCodeForShift(GenTree* tree)
}
else
{
assert(isImmed(tree));
instruction ins = genGetInsForOper(tree);
unsigned shiftByImm = (unsigned)shiftBy->AsIntCon()->gtIconVal;

Expand Down
102 changes: 83 additions & 19 deletions src/coreclr/jit/lower.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4127,7 +4127,37 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
GenTree* op1 = cmp->gtGetOp1();
GenTreeIntCon* op2 = cmp->gtGetOp2()->AsIntCon();

#if defined(TARGET_XARCH) || defined(TARGET_ARM64)
#if defined(TARGET_XARCH) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)

// If 'test' is a single bit test, i.e. an AND/TEST_EQ/TEST_NE whose one operand selects exactly one bit,
// reduces it in place: the tested expression is left in gtOp1, the bit *index* (not mask) in gtOp2,
// and returns true. Otherwise, returns false and 'test' is left unchanged.
auto tryReduceSingleBitTestOps = [this](GenTreeOp* test) -> bool {
assert(test->OperIs(GT_AND, GT_TEST_EQ, GT_TEST_NE));
GenTree* testedOp = test->gtOp1;
GenTree* bitOp = test->gtOp2;
#ifdef TARGET_RISCV64
// A constant power-of-two mask is rewritten in place into its bit index (log2 of the mask).
if (bitOp->IsIntegralConstUnsignedPow2())
{
INT64 bit = bitOp->AsIntConCommon()->IntegralValue();
int log2 = BitOperations::Log2((UINT64)bit);
bitOp->AsIntConCommon()->SetIntegralValue(log2);
return true;
}
#endif
// The "1 << index" shift may be on either side of the (commutative) operation; normalize it into bitOp.
if (!bitOp->OperIs(GT_LSH))
std::swap(bitOp, testedOp);

if (bitOp->OperIs(GT_LSH) && varTypeIsIntOrI(bitOp) && bitOp->gtGetOp1()->IsIntegralConst(1))
{
// Unlink the "1" constant and the shift node itself; the shift-amount node survives and
// becomes the bit-index operand of 'test'.
// NOTE(review): assumes Remove() only unlinks from the LIR range (does not free), so reading
// bitOp->gtGetOp2() afterwards is safe — same pattern as the pre-existing xarch BT lowering.
BlockRange().Remove(bitOp->gtGetOp1());
BlockRange().Remove(bitOp);
test->gtOp1 = testedOp;
test->gtOp2 = bitOp->gtGetOp2();
return true;
}
return false;
};

ssize_t op2Value = op2->IconValue();

#ifdef TARGET_XARCH
Expand Down Expand Up @@ -4165,6 +4195,8 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
bool removeCast =
#ifdef TARGET_ARM64
(op2Value == 0) && cmp->OperIs(GT_EQ, GT_NE, GT_GT) && !castOp->isContained() &&
#elif defined(TARGET_RISCV64)
false && // disable, comparisons and bit operations are full-register only
#endif
(castOp->OperIs(GT_LCL_VAR, GT_CALL, GT_OR, GT_XOR, GT_AND)
#ifdef TARGET_XARCH
Expand Down Expand Up @@ -4222,6 +4254,52 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
cmp->SetOperRaw(GenTree::ReverseRelop(cmp->OperGet()));
}

#ifdef TARGET_RISCV64
// RISC-V has no condition flags and no bit-test instruction, so lower "(x & (1 << n)) EQ|NE 0"
// into plain shifts combined with sign checks or an AND-with-1 instead.
if (op2Value == 0 && !andOp2->isContained() && tryReduceSingleBitTestOps(op1->AsOp()))
{
// After reduction: gtOp1 is the tested expression, gtOp2 is the bit index.
GenTree* testedOp = op1->gtGetOp1();
GenTree* bitIndexOp = op1->gtGetOp2();

if (bitIndexOp->IsIntegralConst())
{
// Shift the tested bit into the sign bit, then check if negative/positive.
// Work on whole registers because comparisons and compressed shifts are full-register only.
INT64 bitIndex = bitIndexOp->AsIntConCommon()->IntegralValue();
INT64 signBitIndex = genTypeSize(TYP_I_IMPL) * 8 - 1;
if (bitIndex < signBitIndex)
{
// Re-purpose the AND node as a full-register left shift by (signBitIndex - bitIndex),
// which moves the tested bit into the sign-bit position.
bitIndexOp->AsIntConCommon()->SetIntegralValue(signBitIndex - bitIndex);
bitIndexOp->SetContained();
op1->SetOperRaw(GT_LSH);
op1->gtType = TYP_I_IMPL;
}
else
{
// The tested bit is the sign bit, remove "AND bitIndex" and only check if negative/positive
assert(bitIndex == signBitIndex);
assert(genActualType(testedOp) == TYP_I_IMPL);
BlockRange().Remove(bitIndexOp);
BlockRange().Remove(op1);
cmp->AsOp()->gtOp1 = testedOp;
}

// "(bit) != 0" becomes a signed "value < 0" (sign bit set); "== 0" becomes "value >= 0".
op2->gtType = TYP_I_IMPL;
cmp->SetOperRaw(cmp->OperIs(GT_NE) ? GT_LT : GT_GE);
cmp->ClearUnsigned();

return cmp;
}

// Shift the tested bit into the lowest bit, then AND with 1.
// The "EQ|NE 0" comparison is folded below as necessary.
var_types type = genActualType(testedOp);
op1->AsOp()->gtOp1 = andOp1 = comp->gtNewOperNode(GT_RSH, type, testedOp, bitIndexOp);
op1->AsOp()->gtOp2 = andOp2 = comp->gtNewIconNode(1, type);
BlockRange().InsertBefore(op1, andOp1, andOp2);
andOp2->SetContained();
}
#endif // TARGET_RISCV64

// Optimizes (X & 1) != 0 to (X & 1)
// Optimizes (X & 1) == 0 to ((NOT X) & 1)
// (== 1 or != 1) cases are transformed to (!= 0 or == 0) above
Expand Down Expand Up @@ -4257,6 +4335,7 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)

if (op2Value == 0)
{
#ifndef TARGET_RISCV64
BlockRange().Remove(op1);
BlockRange().Remove(op2);

Expand Down Expand Up @@ -4300,6 +4379,7 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
}
}
#endif
#endif // !TARGET_RISCV64
}
else if (andOp2->IsIntegralConst() && GenTree::Compare(andOp2, op2))
{
Expand Down Expand Up @@ -4328,31 +4408,15 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
// Note that BT has the same behavior as LSH when the bit index exceeds the
// operand bit size - it uses (bit_index MOD bit_size).
//

GenTree* lsh = cmp->AsOp()->gtOp1;
GenTree* op = cmp->AsOp()->gtOp2;

if (!lsh->OperIs(GT_LSH))
{
std::swap(lsh, op);
}

if (lsh->OperIs(GT_LSH) && varTypeIsIntOrI(lsh) && lsh->gtGetOp1()->IsIntegralConst(1))
if (tryReduceSingleBitTestOps(cmp->AsOp()))
{
cmp->SetOper(cmp->OperIs(GT_TEST_EQ) ? GT_BITTEST_EQ : GT_BITTEST_NE);

BlockRange().Remove(lsh->gtGetOp1());
BlockRange().Remove(lsh);

cmp->AsOp()->gtOp1 = op;
cmp->AsOp()->gtOp2 = lsh->gtGetOp2();
cmp->gtGetOp2()->ClearContained();

return cmp->gtNext;
}
}
#endif // TARGET_XARCH
#endif // defined(TARGET_XARCH) || defined(TARGET_ARM64)
#endif // defined(TARGET_XARCH) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)

// Optimize EQ/NE(relop/SETCC, 0) into (maybe reversed) cond.
if (cmp->OperIs(GT_EQ, GT_NE) && op2->IsIntegralConst(0) && (op1->OperIsCompare() || op1->OperIs(GT_SETCC)))
Expand Down
80 changes: 80 additions & 0 deletions src/tests/JIT/Directed/BitTest/BitTest.cs
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,51 @@ public class Program
// Variable-index bit test on a by-ref long; C# masks a long shift count to 6 bits, so y == 64 acts as y == 0.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I8_BT_mem_reg(ref long x, int y) => (x & (1L << y)) != 0;


// Tests bit 7, the sign bit of sbyte (sign-extension keeps it set in the promoted int operand).
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I1_BT_reg_min(sbyte x) => (x & (1 << 7)) != 0;

// Sign-bit test of sbyte consumed by a conditional, so the result feeds a branch instead of a bool.
[MethodImpl(MethodImplOptions.NoInlining)]
static sbyte I1_BT_reg_min_JCC(sbyte x) => (sbyte)((x & (1 << 7)) == 0 ? (x + 1) : (x - 1));

// Tests bit 15, the sign bit of short.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I2_BT_reg_min(short x) => (x & (1 << 15)) != 0;

// Tests bit 31, the sign bit of int.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I4_BT_reg_min(int x) => (x & (1 << 31)) != 0;

// Inverted form of the int sign-bit test ("== 0" instead of "!= 0").
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I4_BT_reg_min_EQ(int x) => (x & (1 << 31)) == 0;

// Sign-bit test of int consumed by a conditional, so the result feeds a branch instead of a bool.
[MethodImpl(MethodImplOptions.NoInlining)]
static int I4_BT_reg_min_JCC(int x) => (x & (1 << 31)) == 0 ? (x + 1) : (x - 1);

// Tests bit 63, the sign bit of long.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I8_BT_reg_min(long x) => (x & (1L << 63)) != 0;


// Tests bit 6, the highest non-sign bit of sbyte.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I1_BT_reg_min_1(sbyte x) => (x & (1 << 6)) != 0;

// Bit-6 test of sbyte consumed by a conditional, so the result feeds a branch instead of a bool.
[MethodImpl(MethodImplOptions.NoInlining)]
static sbyte I1_BT_reg_min_1_JCC(sbyte x) => (sbyte)((x & (1 << 6)) == 0 ? (x + 1) : (x - 1));

// Tests bit 14, the highest non-sign bit of short.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I2_BT_reg_min_1(short x) => (x & (1 << 14)) != 0;

// Tests bit 30, the highest non-sign bit of int.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I4_BT_reg_min_1(int x) => (x & (1 << 30)) != 0;

// Inverted form of the bit-30 test ("== 0" instead of "!= 0").
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I4_BT_reg_min_1_EQ(int x) => (x & (1 << 30)) == 0;

// Bit-30 test of int consumed by a conditional, so the result feeds a branch instead of a bool.
[MethodImpl(MethodImplOptions.NoInlining)]
static int I4_BT_reg_min_1_JCC(int x) => (x & (1 << 30)) == 0 ? (x + 1) : (x - 1);

// Tests bit 62, the highest non-sign bit of long.
[MethodImpl(MethodImplOptions.NoInlining)]
static bool I8_BT_reg_min_1(long x) => (x & (1L << 62)) != 0;


[Fact]
public static int TestEntryPoint()
{
Expand Down Expand Up @@ -107,6 +152,41 @@ public static int TestEntryPoint()
pass &= I8_BT_mem_reg(ref i8one, 64);
pass &= !I8_BT_mem_reg(ref i8two, 0);

pass &= I1_BT_reg_min(sbyte.MinValue);
pass &= !I1_BT_reg_min(sbyte.MaxValue);
pass &= !I1_BT_reg_min_1(sbyte.MinValue);
pass &= I1_BT_reg_min_1(sbyte.MaxValue);

pass &= I1_BT_reg_min_JCC(sbyte.MinValue) == sbyte.MaxValue;
pass &= I1_BT_reg_min_JCC(sbyte.MaxValue) == sbyte.MinValue;
pass &= I1_BT_reg_min_1_JCC(sbyte.MinValue) == (sbyte.MinValue + 1);
pass &= I1_BT_reg_min_1_JCC(sbyte.MaxValue) == (sbyte.MaxValue - 1);

pass &= I2_BT_reg_min(short.MinValue);
pass &= !I2_BT_reg_min(short.MaxValue);
pass &= !I2_BT_reg_min_1(short.MinValue);
pass &= I2_BT_reg_min_1(short.MaxValue);

pass &= I4_BT_reg_min(int.MinValue);
pass &= !I4_BT_reg_min(int.MaxValue);
pass &= !I4_BT_reg_min_1(int.MinValue);
pass &= I4_BT_reg_min_1(int.MaxValue);

pass &= !I4_BT_reg_min_EQ(int.MinValue);
pass &= I4_BT_reg_min_EQ(int.MaxValue);
pass &= I4_BT_reg_min_1_EQ(int.MinValue);
pass &= !I4_BT_reg_min_1_EQ(int.MaxValue);

pass &= I4_BT_reg_min_JCC(int.MinValue) == int.MaxValue;
pass &= I4_BT_reg_min_JCC(int.MaxValue) == int.MinValue;
pass &= I4_BT_reg_min_1_JCC(int.MinValue) == (int.MinValue + 1);
pass &= I4_BT_reg_min_1_JCC(int.MaxValue) == (int.MaxValue - 1);

pass &= I8_BT_reg_min(long.MinValue);
pass &= !I8_BT_reg_min(long.MaxValue);
pass &= !I8_BT_reg_min_1(long.MinValue);
pass &= I8_BT_reg_min_1(long.MaxValue);

if (pass)
{
Console.WriteLine("PASSED");
Expand Down
Loading