Reduce JIT GC allocations (#2515)

* Turn `MemoryOperand` into a struct

* Remove `IntrinsicOperation`

* Remove `PhiNode`

* Remove `Node`

* Turn `Operand` into a struct

* Turn `Operation` into a struct

* Clean up pool management methods

* Add `Arena` allocator

* Move `OperationHelper` to `Operation.Factory`

* Move `OperandHelper` to `Operand.Factory`

* Optimize `Operation` a bit

* Fix `Arena` initialization

* Rename `NativeList<T>` to `ArenaList<T>`

* Reduce `Operand` size from 88 to 56 bytes

* Reduce `Operation` size from 56 to 40 bytes

* Add optimistic interning of Register & Constant operands

* Optimize `RegisterUsage` pass a bit

* Optimize `RemoveUnusedNodes` pass a bit

Iterating in reverse order allows killing dependency chains in a single
pass.
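
For illustration, a minimal stand-alone sketch of the idea (toy types, not the actual `RemoveUnusedNodes` pass): walking backwards means an unused op is removed before its sources are visited, so their use counts already reflect the removal and a whole dead chain dies in one sweep.

```csharp
using System;
using System.Collections.Generic;

class Op
{
    public string Name;
    public List<Op> Sources = new();
    public int UseCount;            // how many other ops read this op's result
    public bool HasSideEffects;
}

class ReversePassSketch
{
    static void Main()
    {
        var a = new Op { Name = "a" };
        var b = new Op { Name = "b", Sources = { a } };
        var c = new Op { Name = "c", Sources = { b } };
        a.UseCount = 1; // used by b
        b.UseCount = 1; // used by c
        c.UseCount = 0; // dead

        var ops = new List<Op> { a, b, c };

        // Single reverse sweep: removing c frees b, removing b frees a.
        // A forward sweep would only catch c and need extra passes for the rest.
        for (int i = ops.Count - 1; i >= 0; i--)
        {
            Op op = ops[i];

            if (op.UseCount == 0 && !op.HasSideEffects)
            {
                foreach (Op src in op.Sources)
                {
                    src.UseCount--;
                }

                ops.RemoveAt(i);
                Console.WriteLine($"removed {op.Name}");
            }
        }
    }
}
```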

* Fix PPTC symbols

* Optimize `BasicBlock` a bit

Reduce allocations from `_successor` & `DominanceFrontiers`

* Fix `Operation` resize

* Make `Arena` expandable

Change the arena allocator to be expandable by allocating in pages, with
some of them being pooled. Currently 32 pages are pooled. An LRU removal
mechanism should probably be added to it.

Apparently MHR can allocate bitmaps large enough to exceed the 16MB
limit for the type.
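
A rough sketch of the page-based shape (assumed names and sizes; not the actual `ArenaAllocator`, which also has to handle allocations larger than a page, as a later commit notes):

```csharp
using System;
using System.Collections.Generic;

// Bump allocator backed by pages: allocations advance a cursor in the current page,
// a new page is taken from a small static pool (or allocated) when the current one
// runs out, and Reset() returns the pages to the pool for the next translation.
class ArenaSketch
{
    private const int PageSize = 64 * 1024; // illustrative
    private const int MaxPooledPages = 32;  // "currently 32 pages are pooled"

    private static readonly Stack<byte[]> _pool = new();

    private readonly List<byte[]> _pages = new();
    private byte[] _page;
    private int _index = PageSize; // forces a page grab on the first allocation

    public ArraySegment<byte> Allocate(int size)
    {
        if (size > PageSize)
        {
            // The real allocator deals with this case; out of scope for the sketch.
            throw new ArgumentOutOfRangeException(nameof(size));
        }

        if (_index + size > PageSize)
        {
            _page = _pool.Count > 0 ? _pool.Pop() : new byte[PageSize];
            _pages.Add(_page);
            _index = 0;
        }

        var result = new ArraySegment<byte>(_page, _index, size);
        _index += size;

        return result;
    }

    public void Reset()
    {
        foreach (byte[] page in _pages)
        {
            if (_pool.Count < MaxPooledPages)
            {
                _pool.Push(page);
            }
        }

        _pages.Clear();
        _page = null;
        _index = PageSize;
    }
}
```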

* Move `Arena` & `ArenaList` to `Common`

* Remove `ThreadStaticPool` & co

* Add `PhiOperation`

* Reduce `Operand` size from 56 to 48 bytes

* Add linear-probing to `Operand` intern table

* Optimize `HybridAllocator` a bit

* Add `Allocators` class

* Tune `ArenaAllocator` sizes

* Add page removal mechanism to `ArenaAllocator`

After each reset, remove pages which have not been used for more than 5 seconds.

I am on the fence about whether this would be better done with a Gen2
callback object, like the one in System.Buffers.ArrayPool<T>, to trim the
pool. Right now, if a large translation happens, the pages are only freed
after a reset, and that reset may not happen for a while if no new
translation is hit, but the arena base sizes are rather small.
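
Roughly, the removal side could look like this (a sketch only; the names and the exact point where pages get timestamped are assumptions, only the 5-second threshold comes from the commit):

```csharp
using System;
using System.Collections.Generic;

// Pooled pages carry a last-use timestamp; when the arena is reset, pages that have
// not been used for more than 5 seconds are dropped instead of being kept around.
class PagePool
{
    private readonly List<(byte[] Page, DateTime LastUsed)> _pages = new();

    public byte[] Rent(int pageSize)
    {
        if (_pages.Count > 0)
        {
            byte[] page = _pages[^1].Page;
            _pages.RemoveAt(_pages.Count - 1);

            return page;
        }

        return new byte[pageSize];
    }

    public void Return(byte[] page)
    {
        _pages.Add((page, DateTime.UtcNow));
    }

    // Called from the arena's Reset(): stale pages are released to the GC.
    public void TrimOnReset()
    {
        DateTime now = DateTime.UtcNow;

        _pages.RemoveAll(entry => now - entry.LastUsed > TimeSpan.FromSeconds(5));
    }
}
```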

* Fix `OOM` when allocating larger than page size in `ArenaAllocator`

Tweak the resizing mechanism for `Operand.Uses` and `Operand.Assignments`.

* Optimize `Optimizer` a bit

* Optimize `Operand.Add<T>/Remove<T>` a bit

* Clean up `PreAllocator`

* Fix phi insertion order

Reduce codegen diffs.

* Fix code alignment

* Use new heuristics for degree of parallelism

* Suppress warnings

* Address gdkchan's feedback

Renamed `GetValue()` to `GetValueUnsafe()` to make it more clear that
`Operand.Value` should usually not be modified directly.

* Add fast path to `ArenaAllocator`

* Assembly for `ArenaAllocator.Allocate(ulong)`:

  .L0:
    mov rax, [rcx+0x18]
    lea r8, [rax+rdx]
    cmp r8, [rcx+0x10]
    ja short .L2
  .L1:
    mov rdx, [rcx+8]
    add rax, [rdx+8]
    mov [rcx+0x18], r8
    ret
  .L2:
    jmp ArenaAllocator.AllocateSlow(UInt64)

  A few variables/fields had to be changed to `ulong` so that RyuJIT avoids
  emitting zero-extends.
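
  As a sketch, the managed code behind a fast path of that shape might look roughly like this (field names such as `_index`, `_limit` and `_pageBase` are assumptions; the real code goes through the current page object, which is presumably where the extra load in the listing comes from):

```csharp
using System;

// Compile with /unsafe. Pure illustration of the bump-pointer fast path; the cold
// path that grabs a new page lives out of line so the hot path stays tiny.
unsafe class ArenaFastPathSketch
{
    private byte* _pageBase; // start of the current page's memory
    private ulong _limit;    // number of usable bytes in the current page
    private ulong _index;    // bump cursor, kept as ulong to avoid zero-extends

    public void* Allocate(ulong size)
    {
        ulong index = _index;
        ulong free = index + size;

        if (free > _limit)
        {
            return AllocateSlow(size);
        }

        _index = free;

        return _pageBase + index;
    }

    private void* AllocateSlow(ulong size)
    {
        // Take or allocate a new page, then retry; elided in this sketch.
        throw new NotImplementedException();
    }
}
```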

* Implement a new heuristic to free pooled pages.

  If an arena is used often, it is more likely that its pages will be
  needed, so the pages are kept for longer (e.g. during PPTC rebuild or
  bursts of compilations). If it is not used often, then it is more likely
  that its pages will not be needed (e.g. after PPTC rebuild or bursts of
  compilations).
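
  One possible shape for such a heuristic (purely illustrative; the counters, thresholds, and scaling below are assumptions, not the committed logic):

```csharp
using System;

// Arenas that are being reset frequently (PPTC rebuild, bursts of compilations) are
// likely to need their pages again soon, so retention is stretched; arenas that have
// gone quiet shrink back towards the base retention time.
class PageRetentionHeuristic
{
    private DateTime _lastReset = DateTime.UtcNow;
    private double _retentionSeconds = 5;

    public TimeSpan OnReset()
    {
        DateTime now = DateTime.UtcNow;
        TimeSpan sinceLastReset = now - _lastReset;
        _lastReset = now;

        _retentionSeconds = sinceLastReset < TimeSpan.FromSeconds(1)
            ? Math.Min(_retentionSeconds * 2, 60) // busy: keep pages longer
            : Math.Max(_retentionSeconds / 2, 5); // quiet: fall back quickly

        return TimeSpan.FromSeconds(_retentionSeconds);
    }
}
```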

* Address riperiperi's feedback

* Use `EqualityComparer<T>` in `IntrusiveList<T>`

Avoids a potential GC hole in `Equals(T, T)`.
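
A simplified sketch of the pattern (not the actual `IntrusiveList<T>`); the point is that `EqualityComparer<T>.Default.Equals` compares value-type handles such as the new struct `Operation`/`Operand` without boxing, whereas the message attributes a potential GC hole to the previous `Equals(T, T)` path:

```csharp
using System.Collections.Generic;

static class IntrusiveListSketch
{
    // Compare items through EqualityComparer<T>.Default so struct handles are
    // compared on a constrained, non-boxing path.
    public static bool Contains<T>(T[] items, T value)
    {
        var comparer = EqualityComparer<T>.Default;

        for (int i = 0; i < items.Length; i++)
        {
            if (comparer.Equals(items[i], value))
            {
                return true;
            }
        }

        return false;
    }
}
```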
FICTURE7 committed on 2021-08-17 22:08:34 +04:00 (commit 22b2cb39af, parent cd4530f29c)
91 changed files with 2354 additions and 2142 deletions

@ -1,8 +1,7 @@
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;
using System.Diagnostics;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
namespace ARMeilleure.CodeGen.Optimizations
{
@ -33,8 +32,10 @@ namespace ARMeilleure.CodeGen.Optimizations
{
nextBlock = block.ListNext;
if (block.SuccessorCount == 2 && block.Operations.Last is Operation branchOp)
if (block.SuccessorsCount == 2)
{
Operation branchOp = block.Operations.Last;
Debug.Assert(branchOp.Instruction == Instruction.BranchIf);
BasicBlock falseSucc = block.GetSuccessor(0);
@ -59,7 +60,7 @@ namespace ARMeilleure.CodeGen.Optimizations
if (update)
{
cfg.Update(removeUnreachableBlocks: false);
cfg.Update();
}
}
}


@ -1,7 +1,6 @@
using ARMeilleure.IntermediateRepresentation;
using System;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
namespace ARMeilleure.CodeGen.Optimizations
{
@ -9,7 +8,7 @@ namespace ARMeilleure.CodeGen.Optimizations
{
public static void RunPass(Operation operation)
{
if (operation.Destination == null || operation.SourcesCount == 0)
if (operation.Destination == default || operation.SourcesCount == 0)
{
return;
}


@ -1,8 +1,8 @@
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
namespace ARMeilleure.CodeGen.Optimizations
{
@ -10,62 +10,60 @@ namespace ARMeilleure.CodeGen.Optimizations
{
public static void RunPass(ControlFlowGraph cfg)
{
// Scratch buffer used to store uses.
Span<Operation> buffer = default;
bool modified;
do
{
modified = false;
for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
for (BasicBlock block = cfg.Blocks.Last; block != null; block = block.ListPrevious)
{
Node node = block.Operations.First;
Operation node;
Operation prevNode;
while (node != null)
for (node = block.Operations.Last; node != default; node = prevNode)
{
Node nextNode = node.ListNext;
prevNode = node.ListPrevious;
bool isUnused = IsUnused(node);
if (!(node is Operation operation) || isUnused)
if (IsUnused(node))
{
if (isUnused)
{
RemoveNode(block, node);
RemoveNode(block, node);
modified = true;
}
node = nextNode;
modified = true;
continue;
}
else if (node.Instruction == Instruction.Phi)
{
continue;
}
ConstantFolding.RunPass(operation);
ConstantFolding.RunPass(node);
Simplification.RunPass(node);
Simplification.RunPass(operation);
if (DestIsLocalVar(operation))
if (DestIsLocalVar(node))
{
if (IsPropagableCompare(operation))
if (IsPropagableCompare(node))
{
modified |= PropagateCompare(operation);
modified |= PropagateCompare(ref buffer, node);
if (modified && IsUnused(operation))
if (modified && IsUnused(node))
{
RemoveNode(block, node);
}
}
else if (IsPropagableCopy(operation))
else if (IsPropagableCopy(node))
{
PropagateCopy(operation);
PropagateCopy(ref buffer, node);
RemoveNode(block, node);
modified = true;
}
}
node = nextNode;
}
}
}
@ -80,13 +78,14 @@ namespace ARMeilleure.CodeGen.Optimizations
{
modified = false;
for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
for (BasicBlock block = cfg.Blocks.Last; block != null; block = block.ListPrevious)
{
Node node = block.Operations.First;
Operation node;
Operation prevNode;
while (node != null)
for (node = block.Operations.Last; node != default; node = prevNode)
{
Node nextNode = node.ListNext;
prevNode = node.ListPrevious;
if (IsUnused(node))
{
@ -94,15 +93,27 @@ namespace ARMeilleure.CodeGen.Optimizations
modified = true;
}
node = nextNode;
}
}
}
while (modified);
}
private static bool PropagateCompare(Operation compOp)
private static Span<Operation> GetUses(ref Span<Operation> buffer, Operand operand)
{
ReadOnlySpan<Operation> uses = operand.Uses;
if (buffer.Length < uses.Length)
{
buffer = Allocators.Default.AllocateSpan<Operation>((uint)uses.Length);
}
uses.CopyTo(buffer);
return buffer.Slice(0, uses.Length);
}
private static bool PropagateCompare(ref Span<Operation> buffer, Operation compOp)
{
// Try to propagate Compare operations into their BranchIf uses, when these BranchIf uses are in the form
// of:
@ -149,17 +160,12 @@ namespace ARMeilleure.CodeGen.Optimizations
Comparison compType = (Comparison)comp.AsInt32();
Node[] uses = dest.Uses.ToArray();
Span<Operation> uses = GetUses(ref buffer, dest);
foreach (Node use in uses)
foreach (Operation use in uses)
{
if (!(use is Operation operation))
{
continue;
}
// If operation is a BranchIf and has a constant value 0 in its RHS or LHS source operands.
if (IsZeroBranch(operation, out Comparison otherCompType))
if (IsZeroBranch(use, out Comparison otherCompType))
{
Comparison propCompType;
@ -176,9 +182,9 @@ namespace ARMeilleure.CodeGen.Optimizations
continue;
}
operation.SetSource(0, src1);
operation.SetSource(1, src2);
operation.SetSource(2, Const((int)propCompType));
use.SetSource(0, src1);
use.SetSource(1, src2);
use.SetSource(2, Const((int)propCompType));
modified = true;
}
@ -187,15 +193,15 @@ namespace ARMeilleure.CodeGen.Optimizations
return modified;
}
private static void PropagateCopy(Operation copyOp)
private static void PropagateCopy(ref Span<Operation> buffer, Operation copyOp)
{
// Propagate copy source operand to all uses of the destination operand.
Operand dest = copyOp.Destination;
Operand source = copyOp.GetSource(0);
Node[] uses = dest.Uses.ToArray();
Span<Operation> uses = GetUses(ref buffer, dest);
foreach (Node use in uses)
foreach (Operation use in uses)
{
for (int index = 0; index < use.SourcesCount; index++)
{
@ -207,7 +213,7 @@ namespace ARMeilleure.CodeGen.Optimizations
}
}
private static void RemoveNode(BasicBlock block, Node node)
private static void RemoveNode(BasicBlock block, Operation node)
{
// Remove a node from the nodes list, and also remove itself
// from all the use lists on the operands that this node uses.
@ -215,31 +221,31 @@ namespace ARMeilleure.CodeGen.Optimizations
for (int index = 0; index < node.SourcesCount; index++)
{
node.SetSource(index, null);
node.SetSource(index, default);
}
Debug.Assert(node.Destination == null || node.Destination.Uses.Count == 0);
Debug.Assert(node.Destination == default || node.Destination.UsesCount == 0);
node.Destination = null;
node.Destination = default;
}
private static bool IsUnused(Node node)
private static bool IsUnused(Operation node)
{
return DestIsLocalVar(node) && node.Destination.Uses.Count == 0 && !HasSideEffects(node);
return DestIsLocalVar(node) && node.Destination.UsesCount == 0 && !HasSideEffects(node);
}
private static bool DestIsLocalVar(Node node)
private static bool DestIsLocalVar(Operation node)
{
return node.Destination != null && node.Destination.Kind == OperandKind.LocalVariable;
return node.Destination != default && node.Destination.Kind == OperandKind.LocalVariable;
}
private static bool HasSideEffects(Node node)
private static bool HasSideEffects(Operation node)
{
return (node is Operation operation) && (operation.Instruction == Instruction.Call
|| operation.Instruction == Instruction.Tailcall
|| operation.Instruction == Instruction.CompareAndSwap
|| operation.Instruction == Instruction.CompareAndSwap16
|| operation.Instruction == Instruction.CompareAndSwap8);
return node.Instruction == Instruction.Call
|| node.Instruction == Instruction.Tailcall
|| node.Instruction == Instruction.CompareAndSwap
|| node.Instruction == Instruction.CompareAndSwap16
|| node.Instruction == Instruction.CompareAndSwap8;
}
private static bool IsPropagableCompare(Operation operation)


@ -1,7 +1,6 @@
using ARMeilleure.IntermediateRepresentation;
using System;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
namespace ARMeilleure.CodeGen.Optimizations
{


@ -1,9 +1,8 @@
using ARMeilleure.IntermediateRepresentation;
using System;
using System.Collections.Generic;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.OperationHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
using static ARMeilleure.IntermediateRepresentation.Operation.Factory;
namespace ARMeilleure.CodeGen.RegisterAllocators
{
@ -210,7 +209,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
Operand register = GetRegister(left.Register, type);
_spillQueue.Enqueue(Operation(Instruction.Spill, null, offset, register));
_spillQueue.Enqueue(Operation(Instruction.Spill, default, offset, register));
HasCopy = true;
}


@ -1,11 +1,10 @@
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;
using System.Collections.Generic;
using System;
using System.Diagnostics;
using System.Numerics;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.OperationHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
using static ARMeilleure.IntermediateRepresentation.Operation.Factory;
namespace ARMeilleure.CodeGen.RegisterAllocators
{
@ -29,19 +28,14 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
}
}
private class LocalInfo
private struct LocalInfo
{
public int Uses { get; set; }
public int UseCount { get; set; }
public bool PreAllocated { get; set; }
public int Register { get; set; }
public int SpillOffset { get; set; }
public int Uses { get; set; }
public int UsesAllocated { get; set; }
public int Register { get; set; }
public int SpillOffset { get; set; }
public int Sequence { get; set; }
public Operand Temp { get; set; }
public OperandType Type { get; }
private int _first;
@ -49,13 +43,21 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
public bool IsBlockLocal => _first == _last;
public LocalInfo(OperandType type, int uses)
public LocalInfo(OperandType type, int uses, int blkIndex)
{
Uses = uses;
Type = type;
UsesAllocated = 0;
Register = 0;
SpillOffset = 0;
Sequence = 0;
Temp = default;
_first = -1;
_last = -1;
SetBlockIndex(blkIndex);
}
public void SetBlockIndex(int blkIndex)
@ -72,10 +74,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
}
}
public AllocationResult RunPass(
ControlFlowGraph cfg,
StackAllocator stackAlloc,
RegisterMasks regMasks)
public AllocationResult RunPass(ControlFlowGraph cfg, StackAllocator stackAlloc, RegisterMasks regMasks)
{
int intUsedRegisters = 0;
int vecUsedRegisters = 0;
@ -84,9 +83,33 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
int vecFreeRegisters = regMasks.VecAvailableRegisters;
var blockInfo = new BlockInfo[cfg.Blocks.Count];
var localInfo = new LocalInfo[cfg.Blocks.Count * 3];
int localInfoCount = 0;
var locInfo = new List<LocalInfo>();
var locVisited = new HashSet<Operand>();
// The "visited" state is stored in the MSB of the local's value.
const ulong VisitedMask = 1ul << 63;
bool IsVisited(Operand local)
{
return (local.GetValueUnsafe() & VisitedMask) != 0;
}
void SetVisited(Operand local)
{
local.GetValueUnsafe() |= VisitedMask | (uint)++localInfoCount;
}
ref LocalInfo GetLocalInfo(Operand local)
{
Debug.Assert(local.Kind == OperandKind.LocalVariable);
if (!IsVisited(local))
{
throw new InvalidOperationException("Local was not visisted yet. Used before defined?");
}
return ref localInfo[(uint)local.GetValueUnsafe() - 1];
}
for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
{
@ -97,59 +120,58 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
bool hasCall = false;
for (Node node = block.Operations.First; node != null; node = node.ListNext)
for (Operation node = block.Operations.First; node != default; node = node.ListNext)
{
if (node is Operation operation && operation.Instruction == Instruction.Call)
if (node.Instruction == Instruction.Call)
{
hasCall = true;
}
for (int srcIndex = 0; srcIndex < node.SourcesCount; srcIndex++)
for (int i = 0; i < node.SourcesCount; i++)
{
Operand source = node.GetSource(srcIndex);
Operand source = node.GetSource(i);
if (source.Kind == OperandKind.LocalVariable)
{
locInfo[source.GetLocalNumber() - 1].SetBlockIndex(block.Index);
GetLocalInfo(source).SetBlockIndex(block.Index);
}
else if (source.Kind == OperandKind.Memory)
{
MemoryOperand memOp = (MemoryOperand)source;
MemoryOperand memOp = source.GetMemory();
if (memOp.BaseAddress != null)
if (memOp.BaseAddress != default)
{
locInfo[memOp.BaseAddress.GetLocalNumber() - 1].SetBlockIndex(block.Index);
GetLocalInfo(memOp.BaseAddress).SetBlockIndex(block.Index);
}
if (memOp.Index != null)
if (memOp.Index != default)
{
locInfo[memOp.Index.GetLocalNumber() - 1].SetBlockIndex(block.Index);
GetLocalInfo(memOp.Index).SetBlockIndex(block.Index);
}
}
}
for (int dstIndex = 0; dstIndex < node.DestinationsCount; dstIndex++)
for (int i = 0; i < node.DestinationsCount; i++)
{
Operand dest = node.GetDestination(dstIndex);
Operand dest = node.GetDestination(i);
if (dest.Kind == OperandKind.LocalVariable)
{
LocalInfo info;
if (!locVisited.Add(dest))
if (IsVisited(dest))
{
info = locInfo[dest.GetLocalNumber() - 1];
GetLocalInfo(dest).SetBlockIndex(block.Index);
}
else
{
dest.NumberLocal(locInfo.Count + 1);
SetVisited(dest);
info = new LocalInfo(dest.Type, UsesCount(dest));
if (localInfoCount > localInfo.Length)
{
Array.Resize(ref localInfo, localInfoCount * 2);
}
locInfo.Add(info);
GetLocalInfo(dest) = new LocalInfo(dest.Type, UsesCount(dest), block.Index);
}
info.SetBlockIndex(block.Index);
}
else if (dest.Kind == OperandKind.Register)
{
@ -192,42 +214,26 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
intLocalFreeRegisters &= ~(intSpillTempRegisters | intCallerSavedRegisters);
vecLocalFreeRegisters &= ~(vecSpillTempRegisters | vecCallerSavedRegisters);
for (Node node = block.Operations.First; node != null; node = node.ListNext)
for (Operation node = block.Operations.First; node != default; node = node.ListNext)
{
int intLocalUse = 0;
int vecLocalUse = 0;
void AllocateRegister(Operand source, MemoryOperand memOp, int srcIndex)
Operand AllocateRegister(Operand local)
{
LocalInfo info = locInfo[source.GetLocalNumber() - 1];
ref LocalInfo info = ref GetLocalInfo(local);
info.UseCount++;
info.UsesAllocated++;
Debug.Assert(info.UseCount <= info.Uses);
Debug.Assert(info.UsesAllocated <= info.Uses);
if (info.Register != -1)
{
Operand reg = Register(info.Register, source.Type.ToRegisterType(), source.Type);
Operand reg = Register(info.Register, local.Type.ToRegisterType(), local.Type);
if (memOp != null)
if (info.UsesAllocated == info.Uses)
{
if (srcIndex == 0)
{
memOp.BaseAddress = reg;
}
else /* if (srcIndex == 1) */
{
memOp.Index = reg;
}
}
else
{
node.SetSource(srcIndex, reg);
}
if (info.UseCount == info.Uses && !info.PreAllocated)
{
if (source.Type.IsInteger())
if (local.Type.IsInteger())
{
intLocalFreeRegisters |= 1 << info.Register;
}
@ -236,72 +242,80 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
vecLocalFreeRegisters |= 1 << info.Register;
}
}
}
else if (node is Operation operation && operation.Instruction == Instruction.Copy)
{
Operation fillOp = Operation(Instruction.Fill, node.Destination, Const(info.SpillOffset));
block.Operations.AddBefore(node, fillOp);
block.Operations.Remove(node);
node = fillOp;
return reg;
}
else
{
Operand temp = info.Temp;
if (temp == null || info.Sequence != sequence)
if (temp == default || info.Sequence != sequence)
{
temp = source.Type.IsInteger()
? GetSpillTemp(source, intSpillTempRegisters, ref intLocalUse)
: GetSpillTemp(source, vecSpillTempRegisters, ref vecLocalUse);
temp = local.Type.IsInteger()
? GetSpillTemp(local, intSpillTempRegisters, ref intLocalUse)
: GetSpillTemp(local, vecSpillTempRegisters, ref vecLocalUse);
info.Sequence = sequence;
info.Temp = temp;
}
if (memOp != null)
{
if (srcIndex == 0)
{
memOp.BaseAddress = temp;
}
else /* if (srcIndex == 1) */
{
memOp.Index = temp;
}
}
else
{
node.SetSource(srcIndex, temp);
}
Operation fillOp = Operation(Instruction.Fill, temp, Const(info.SpillOffset));
block.Operations.AddBefore(node, fillOp);
return temp;
}
}
for (int srcIndex = 0; srcIndex < node.SourcesCount; srcIndex++)
bool folded = false;
// If operation is a copy of a local and that local is living on the stack, we turn the copy into
// a fill, instead of inserting a fill before it.
if (node.Instruction == Instruction.Copy)
{
Operand source = node.GetSource(srcIndex);
Operand source = node.GetSource(0);
if (source.Kind == OperandKind.LocalVariable)
{
AllocateRegister(source, null, srcIndex);
}
else if (source.Kind == OperandKind.Memory)
{
MemoryOperand memOp = (MemoryOperand)source;
ref LocalInfo info = ref GetLocalInfo(source);
if (memOp.BaseAddress != null)
if (info.Register == -1)
{
AllocateRegister(memOp.BaseAddress, memOp, 0);
Operation fillOp = Operation(Instruction.Fill, node.Destination, Const(info.SpillOffset));
block.Operations.AddBefore(node, fillOp);
block.Operations.Remove(node);
node = fillOp;
folded = true;
}
}
}
if (memOp.Index != null)
if (!folded)
{
for (int i = 0; i < node.SourcesCount; i++)
{
Operand source = node.GetSource(i);
if (source.Kind == OperandKind.LocalVariable)
{
AllocateRegister(memOp.Index, memOp, 1);
node.SetSource(i, AllocateRegister(source));
}
else if (source.Kind == OperandKind.Memory)
{
MemoryOperand memOp = source.GetMemory();
if (memOp.BaseAddress != default)
{
memOp.BaseAddress = AllocateRegister(memOp.BaseAddress);
}
if (memOp.Index != default)
{
memOp.Index = AllocateRegister(memOp.Index);
}
}
}
}
@ -309,18 +323,18 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
int intLocalAsg = 0;
int vecLocalAsg = 0;
for (int dstIndex = 0; dstIndex < node.DestinationsCount; dstIndex++)
for (int i = 0; i < node.DestinationsCount; i++)
{
Operand dest = node.GetDestination(dstIndex);
Operand dest = node.GetDestination(i);
if (dest.Kind != OperandKind.LocalVariable)
{
continue;
}
LocalInfo info = locInfo[dest.GetLocalNumber() - 1];
ref LocalInfo info = ref GetLocalInfo(dest);
if (info.UseCount == 0 && !info.PreAllocated)
if (info.UsesAllocated == 0)
{
int mask = dest.Type.IsInteger()
? intLocalFreeRegisters
@ -350,19 +364,19 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
}
}
info.UseCount++;
info.UsesAllocated++;
Debug.Assert(info.UseCount <= info.Uses);
Debug.Assert(info.UsesAllocated <= info.Uses);
if (info.Register != -1)
{
node.SetDestination(dstIndex, Register(info.Register, dest.Type.ToRegisterType(), dest.Type));
node.SetDestination(i, Register(info.Register, dest.Type.ToRegisterType(), dest.Type));
}
else
{
Operand temp = info.Temp;
if (temp == null || info.Sequence != sequence)
if (temp == default || info.Sequence != sequence)
{
temp = dest.Type.IsInteger()
? GetSpillTemp(dest, intSpillTempRegisters, ref intLocalAsg)
@ -372,9 +386,9 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
info.Temp = temp;
}
node.SetDestination(dstIndex, temp);
node.SetDestination(i, temp);
Operation spillOp = Operation(Instruction.Spill, null, Const(info.SpillOffset), temp);
Operation spillOp = Operation(Instruction.Spill, default, Const(info.SpillOffset), temp);
block.Operations.AddAfter(node, spillOp);
@ -435,7 +449,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
private static int UsesCount(Operand local)
{
return local.Assignments.Count + local.Uses.Count;
return local.AssignmentsCount + local.UsesCount;
}
}
}


@ -29,11 +29,11 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
private LiveInterval[] _parentIntervals;
private List<(IntrusiveList<Node>, Node)> _operationNodes;
private List<(IntrusiveList<Operation>, Operation)> _operationNodes;
private int _operationsCount;
private class AllocationContext : IDisposable
private class AllocationContext
{
public RegisterMasks Masks { get; }
@ -50,10 +50,8 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
StackAlloc = stackAlloc;
Masks = masks;
BitMapPool.PrepareBitMapPool();
Active = BitMapPool.Allocate(intervalsCount);
Inactive = BitMapPool.Allocate(intervalsCount);
Active = new BitMap(Allocators.Default, intervalsCount);
Inactive = new BitMap(Allocators.Default, intervalsCount);
}
public void MoveActiveToInactive(int bit)
@ -72,11 +70,6 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
dest.Set(bit);
}
public void Dispose()
{
BitMapPool.ResetBitMapPool();
}
}
public AllocationResult RunPass(
@ -86,7 +79,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
{
NumberLocals(cfg);
using AllocationContext context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);
var context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);
BuildIntervals(cfg, context);
@ -588,7 +581,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
int splitPosition = kv.Key;
(IntrusiveList<Node> nodes, Node node) = GetOperationNode(splitPosition);
(IntrusiveList<Operation> nodes, Operation node) = GetOperationNode(splitPosition);
Operation[] sequence = copyResolver.Sequence();
@ -621,9 +614,9 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
continue;
}
bool hasSingleOrNoSuccessor = block.SuccessorCount <= 1;
bool hasSingleOrNoSuccessor = block.SuccessorsCount <= 1;
for (int i = 0; i < block.SuccessorCount; i++)
for (int i = 0; i < block.SuccessorsCount; i++)
{
BasicBlock successor = block.GetSuccessor(i);
@ -677,7 +670,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
{
successor.Operations.AddFirst(sequence[0]);
Node prependNode = sequence[0];
Operation prependNode = sequence[0];
for (int index = 1; index < sequence.Length; index++)
{
@ -710,7 +703,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
for (int i = usePositions.Count - 1; i >= 0; i--)
{
int usePosition = -usePositions[i];
(_, Node operation) = GetOperationNode(usePosition);
(_, Operation operation) = GetOperationNode(usePosition);
for (int index = 0; index < operation.SourcesCount; index++)
{
@ -722,7 +715,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
}
else if (source.Kind == OperandKind.Memory)
{
MemoryOperand memOp = (MemoryOperand)source;
MemoryOperand memOp = source.GetMemory();
if (memOp.BaseAddress == current.Local)
{
@ -752,20 +745,20 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
{
Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
return OperandHelper.Register(
return Operand.Factory.Register(
interval.Register.Index,
interval.Register.Type,
interval.Local.Type);
}
private (IntrusiveList<Node>, Node) GetOperationNode(int position)
private (IntrusiveList<Operation>, Operation) GetOperationNode(int position)
{
return _operationNodes[position / InstructionGap];
}
private void NumberLocals(ControlFlowGraph cfg)
{
_operationNodes = new List<(IntrusiveList<Node>, Node)>();
_operationNodes = new List<(IntrusiveList<Operation>, Operation)>();
_intervals = new List<LiveInterval>();
@ -783,13 +776,14 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
{
BasicBlock block = cfg.PostOrderBlocks[index];
for (Node node = block.Operations.First; node != null; node = node.ListNext)
for (Operation node = block.Operations.First; node != default; node = node.ListNext)
{
_operationNodes.Add((block.Operations, node));
for (int i = 0; i < node.DestinationsCount; i++)
{
Operand dest = node.GetDestination(i);
if (dest.Kind == OperandKind.LocalVariable && visited.Add(dest))
{
dest.NumberLocal(_intervals.Count);
@ -804,7 +798,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
if (block.Operations.Count == 0)
{
// Pretend we have a dummy instruction on the empty block.
_operationNodes.Add((null, null));
_operationNodes.Add((default, default));
_operationsCount += InstructionGap;
}
@ -825,10 +819,10 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
// Compute local live sets.
for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
{
BitMap liveGen = BitMapPool.Allocate(mapSize);
BitMap liveKill = BitMapPool.Allocate(mapSize);
BitMap liveGen = new BitMap(Allocators.Default, mapSize);
BitMap liveKill = new BitMap(Allocators.Default, mapSize);
for (Node node = block.Operations.First; node != null; node = node.ListNext)
for (Operation node = block.Operations.First; node != default; node = node.ListNext)
{
Sources(node, (source) =>
{
@ -857,8 +851,8 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
for (int index = 0; index < cfg.Blocks.Count; index++)
{
blkLiveIn [index] = BitMapPool.Allocate(mapSize);
blkLiveOut[index] = BitMapPool.Allocate(mapSize);
blkLiveIn [index] = new BitMap(Allocators.Default, mapSize);
blkLiveOut[index] = new BitMap(Allocators.Default, mapSize);
}
bool modified;
@ -873,7 +867,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
BitMap liveOut = blkLiveOut[block.Index];
for (int i = 0; i < block.SuccessorCount; i++)
for (int i = 0; i < block.SuccessorsCount; i++)
{
BasicBlock succ = block.GetSuccessor(i);
@ -926,7 +920,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
continue;
}
foreach (Node node in BottomOperations(block))
foreach (Operation node in BottomOperations(block))
{
operationPos -= InstructionGap;
@ -947,7 +941,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
interval.AddUsePosition(operationPos);
});
if (node is Operation operation && operation.Instruction == Instruction.Call)
if (node.Instruction == Instruction.Call)
{
AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
@ -993,11 +987,11 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
}
private static IEnumerable<Node> BottomOperations(BasicBlock block)
private static IEnumerable<Operation> BottomOperations(BasicBlock block)
{
Node node = block.Operations.Last;
Operation node = block.Operations.Last;
while (node != null && !(node is PhiNode))
while (node != default)
{
yield return node;
@ -1005,7 +999,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
}
}
private static void Sources(Node node, Action<Operand> action)
private static void Sources(Operation node, Action<Operand> action)
{
for (int index = 0; index < node.SourcesCount; index++)
{
@ -1017,14 +1011,14 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
}
else if (source.Kind == OperandKind.Memory)
{
MemoryOperand memOp = (MemoryOperand)source;
MemoryOperand memOp = source.GetMemory();
if (memOp.BaseAddress != null)
if (memOp.BaseAddress != default)
{
action(memOp.BaseAddress);
}
if (memOp.Index != null)
if (memOp.Index != default)
{
action(memOp.Index);
}


@ -34,7 +34,7 @@ namespace ARMeilleure.CodeGen.RegisterAllocators
public bool IsEmpty => _ranges.Count == 0;
public LiveInterval(Operand local = null, LiveInterval parent = null)
public LiveInterval(Operand local = default, LiveInterval parent = null)
{
Local = local;
_parent = parent ?? this;


@ -329,12 +329,12 @@ namespace ARMeilleure.CodeGen.X86
public void Bswap(Operand dest)
{
WriteInstruction(dest, null, dest.Type, X86Instruction.Bswap);
WriteInstruction(dest, default, dest.Type, X86Instruction.Bswap);
}
public void Call(Operand dest)
{
WriteInstruction(dest, null, OperandType.None, X86Instruction.Call);
WriteInstruction(dest, default, OperandType.None, X86Instruction.Call);
}
public void Cdq()
@ -346,7 +346,7 @@ namespace ARMeilleure.CodeGen.X86
{
InstructionInfo info = _instTable[(int)X86Instruction.Cmovcc];
WriteOpCode(dest, null, source, type, info.Flags, info.OpRRM | (int)condition, rrm: true);
WriteOpCode(dest, default, source, type, info.Flags, info.OpRRM | (int)condition, rrm: true);
}
public void Cmp(Operand src1, Operand src2, OperandType type)
@ -360,30 +360,38 @@ namespace ARMeilleure.CodeGen.X86
WriteByte(0x99);
}
public void Cmpxchg(MemoryOperand memOp, Operand src)
public void Cmpxchg(Operand memOp, Operand src)
{
Debug.Assert(memOp.Kind == OperandKind.Memory);
WriteByte(LockPrefix);
WriteInstruction(memOp, src, src.Type, X86Instruction.Cmpxchg);
}
public void Cmpxchg16(MemoryOperand memOp, Operand src)
public void Cmpxchg16(Operand memOp, Operand src)
{
Debug.Assert(memOp.Kind == OperandKind.Memory);
WriteByte(LockPrefix);
WriteByte(0x66);
WriteInstruction(memOp, src, src.Type, X86Instruction.Cmpxchg);
}
public void Cmpxchg16b(MemoryOperand memOp)
public void Cmpxchg16b(Operand memOp)
{
Debug.Assert(memOp.Kind == OperandKind.Memory);
WriteByte(LockPrefix);
WriteInstruction(memOp, null, OperandType.None, X86Instruction.Cmpxchg16b);
WriteInstruction(memOp, default, OperandType.None, X86Instruction.Cmpxchg16b);
}
public void Cmpxchg8(MemoryOperand memOp, Operand src)
public void Cmpxchg8(Operand memOp, Operand src)
{
Debug.Assert(memOp.Kind == OperandKind.Memory);
WriteByte(LockPrefix);
WriteInstruction(memOp, src, src.Type, X86Instruction.Cmpxchg8);
@ -391,12 +399,12 @@ namespace ARMeilleure.CodeGen.X86
public void Comisd(Operand src1, Operand src2)
{
WriteInstruction(src1, null, src2, X86Instruction.Comisd);
WriteInstruction(src1, default, src2, X86Instruction.Comisd);
}
public void Comiss(Operand src1, Operand src2)
{
WriteInstruction(src1, null, src2, X86Instruction.Comiss);
WriteInstruction(src1, default, src2, X86Instruction.Comiss);
}
public void Cvtsd2ss(Operand dest, Operand src1, Operand src2)
@ -421,7 +429,7 @@ namespace ARMeilleure.CodeGen.X86
public void Div(Operand source)
{
WriteInstruction(null, source, source.Type, X86Instruction.Div);
WriteInstruction(default, source, source.Type, X86Instruction.Div);
}
public void Divsd(Operand dest, Operand src1, Operand src2)
@ -436,12 +444,12 @@ namespace ARMeilleure.CodeGen.X86
public void Idiv(Operand source)
{
WriteInstruction(null, source, source.Type, X86Instruction.Idiv);
WriteInstruction(default, source, source.Type, X86Instruction.Idiv);
}
public void Imul(Operand source)
{
WriteInstruction(null, source, source.Type, X86Instruction.Imul128);
WriteInstruction(default, source, source.Type, X86Instruction.Imul128);
}
public void Imul(Operand dest, Operand source, OperandType type)
@ -465,13 +473,13 @@ namespace ARMeilleure.CodeGen.X86
if (IsImm8(src2.Value, src2.Type) && info.OpRMImm8 != BadOp)
{
WriteOpCode(dest, null, src1, type, info.Flags, info.OpRMImm8, rrm: true);
WriteOpCode(dest, default, src1, type, info.Flags, info.OpRMImm8, rrm: true);
WriteByte(src2.AsByte());
}
else if (IsImm32(src2.Value, src2.Type) && info.OpRMImm32 != BadOp)
{
WriteOpCode(dest, null, src1, type, info.Flags, info.OpRMImm32, rrm: true);
WriteOpCode(dest, default, src1, type, info.Flags, info.OpRMImm32, rrm: true);
WriteInt32(src2.AsInt32());
}
@ -531,12 +539,12 @@ namespace ARMeilleure.CodeGen.X86
public void Jmp(Operand dest)
{
WriteInstruction(dest, null, OperandType.None, X86Instruction.Jmp);
WriteInstruction(dest, default, OperandType.None, X86Instruction.Jmp);
}
public void Ldmxcsr(Operand dest)
{
WriteInstruction(dest, null, OperandType.I32, X86Instruction.Ldmxcsr);
WriteInstruction(dest, default, OperandType.I32, X86Instruction.Ldmxcsr);
}
public void Lea(Operand dest, Operand source, OperandType type)
@ -565,17 +573,17 @@ namespace ARMeilleure.CodeGen.X86
if (source.Type.IsInteger() || source.Kind == OperandKind.Memory)
{
WriteOpCode(dest, null, source, OperandType.None, info.Flags, info.OpRRM, rrm: true);
WriteOpCode(dest, default, source, OperandType.None, info.Flags, info.OpRRM, rrm: true);
}
else
{
WriteOpCode(dest, null, source, OperandType.None, info.Flags, info.OpRMR);
WriteOpCode(dest, default, source, OperandType.None, info.Flags, info.OpRMR);
}
}
public void Movdqu(Operand dest, Operand source)
{
WriteInstruction(dest, null, source, X86Instruction.Movdqu);
WriteInstruction(dest, default, source, X86Instruction.Movdqu);
}
public void Movhlps(Operand dest, Operand src1, Operand src2)
@ -596,11 +604,11 @@ namespace ARMeilleure.CodeGen.X86
if (source.Type.IsInteger() || source.Kind == OperandKind.Memory)
{
WriteOpCode(dest, null, source, OperandType.None, flags, info.OpRRM, rrm: true);
WriteOpCode(dest, default, source, OperandType.None, flags, info.OpRRM, rrm: true);
}
else if (dest.Type.IsInteger() || dest.Kind == OperandKind.Memory)
{
WriteOpCode(dest, null, source, OperandType.None, flags, info.OpRMR);
WriteOpCode(dest, default, source, OperandType.None, flags, info.OpRMR);
}
else
{
@ -645,7 +653,7 @@ namespace ARMeilleure.CodeGen.X86
public void Mul(Operand source)
{
WriteInstruction(null, source, source.Type, X86Instruction.Mul128);
WriteInstruction(default, source, source.Type, X86Instruction.Mul128);
}
public void Mulsd(Operand dest, Operand src1, Operand src2)
@ -660,12 +668,12 @@ namespace ARMeilleure.CodeGen.X86
public void Neg(Operand dest)
{
WriteInstruction(dest, null, dest.Type, X86Instruction.Neg);
WriteInstruction(dest, default, dest.Type, X86Instruction.Neg);
}
public void Not(Operand dest)
{
WriteInstruction(dest, null, dest.Type, X86Instruction.Not);
WriteInstruction(dest, default, dest.Type, X86Instruction.Not);
}
public void Or(Operand dest, Operand source, OperandType type)
@ -675,7 +683,7 @@ namespace ARMeilleure.CodeGen.X86
public void Pclmulqdq(Operand dest, Operand source, byte imm)
{
WriteInstruction(dest, null, source, X86Instruction.Pclmulqdq);
WriteInstruction(dest, default, source, X86Instruction.Pclmulqdq);
WriteByte(imm);
}
@ -687,28 +695,28 @@ namespace ARMeilleure.CodeGen.X86
public void Pextrb(Operand dest, Operand source, byte imm)
{
WriteInstruction(dest, null, source, X86Instruction.Pextrb);
WriteInstruction(dest, default, source, X86Instruction.Pextrb);
WriteByte(imm);
}
public void Pextrd(Operand dest, Operand source, byte imm)
{
WriteInstruction(dest, null, source, X86Instruction.Pextrd);
WriteInstruction(dest, default, source, X86Instruction.Pextrd);
WriteByte(imm);
}
public void Pextrq(Operand dest, Operand source, byte imm)
{
WriteInstruction(dest, null, source, X86Instruction.Pextrq);
WriteInstruction(dest, default, source, X86Instruction.Pextrq);
WriteByte(imm);
}
public void Pextrw(Operand dest, Operand source, byte imm)
{
WriteInstruction(dest, null, source, X86Instruction.Pextrw);
WriteInstruction(dest, default, source, X86Instruction.Pextrw);
WriteByte(imm);
}
@ -749,7 +757,7 @@ namespace ARMeilleure.CodeGen.X86
}
else
{
WriteInstruction(dest, null, dest.Type, X86Instruction.Pop);
WriteInstruction(dest, default, dest.Type, X86Instruction.Pop);
}
}
@ -760,7 +768,7 @@ namespace ARMeilleure.CodeGen.X86
public void Pshufd(Operand dest, Operand source, byte imm)
{
WriteInstruction(dest, null, source, X86Instruction.Pshufd);
WriteInstruction(dest, default, source, X86Instruction.Pshufd);
WriteByte(imm);
}
@ -773,7 +781,7 @@ namespace ARMeilleure.CodeGen.X86
}
else
{
WriteInstruction(null, source, source.Type, X86Instruction.Push);
WriteInstruction(default, source, source.Type, X86Instruction.Push);
}
}
@ -806,12 +814,12 @@ namespace ARMeilleure.CodeGen.X86
{
InstructionInfo info = _instTable[(int)X86Instruction.Setcc];
WriteOpCode(dest, null, null, OperandType.None, info.Flags, info.OpRRM | (int)condition);
WriteOpCode(dest, default, default, OperandType.None, info.Flags, info.OpRRM | (int)condition);
}
public void Stmxcsr(Operand dest)
{
WriteInstruction(dest, null, OperandType.I32, X86Instruction.Stmxcsr);
WriteInstruction(dest, default, OperandType.I32, X86Instruction.Stmxcsr);
}
public void Sub(Operand dest, Operand source, OperandType type)
@ -850,7 +858,7 @@ namespace ARMeilleure.CodeGen.X86
Operand source,
OperandType type = OperandType.None)
{
WriteInstruction(dest, null, source, inst, type);
WriteInstruction(dest, default, source, inst, type);
}
public void WriteInstruction(X86Instruction inst, Operand dest, Operand src1, Operand src2)
@ -877,7 +885,7 @@ namespace ARMeilleure.CodeGen.X86
public void WriteInstruction(X86Instruction inst, Operand dest, Operand source, byte imm)
{
WriteInstruction(dest, null, source, inst);
WriteInstruction(dest, default, source, inst);
WriteByte(imm);
}
@ -917,11 +925,11 @@ namespace ARMeilleure.CodeGen.X86
Debug.Assert(shiftReg == X86Register.Rcx, $"Invalid shift register \"{shiftReg}\".");
source = null;
source = default;
}
else if (source.Kind == OperandKind.Constant)
{
source = source.With((int)source.Value & (dest.Type == OperandType.I32 ? 0x1f : 0x3f));
source = Operand.Factory.Const((int)source.Value & (dest.Type == OperandType.I32 ? 0x1f : 0x3f));
}
WriteInstruction(dest, source, type, inst);
@ -931,7 +939,7 @@ namespace ARMeilleure.CodeGen.X86
{
InstructionInfo info = _instTable[(int)inst];
if (source != null)
if (source != default)
{
if (source.Kind == OperandKind.Constant)
{
@ -939,29 +947,29 @@ namespace ARMeilleure.CodeGen.X86
if (inst == X86Instruction.Mov8)
{
WriteOpCode(dest, null, null, type, info.Flags, info.OpRMImm8);
WriteOpCode(dest, default, default, type, info.Flags, info.OpRMImm8);
WriteByte((byte)imm);
}
else if (inst == X86Instruction.Mov16)
{
WriteOpCode(dest, null, null, type, info.Flags, info.OpRMImm32);
WriteOpCode(dest, default, default, type, info.Flags, info.OpRMImm32);
WriteInt16((short)imm);
}
else if (IsImm8(imm, type) && info.OpRMImm8 != BadOp)
{
WriteOpCode(dest, null, null, type, info.Flags, info.OpRMImm8);
WriteOpCode(dest, default, default, type, info.Flags, info.OpRMImm8);
WriteByte((byte)imm);
}
else if (!source.Relocatable && IsImm32(imm, type) && info.OpRMImm32 != BadOp)
{
WriteOpCode(dest, null, null, type, info.Flags, info.OpRMImm32);
WriteOpCode(dest, default, default, type, info.Flags, info.OpRMImm32);
WriteInt32((int)imm);
}
else if (dest?.Kind == OperandKind.Register && info.OpRImm64 != BadOp)
else if (dest != default && dest.Kind == OperandKind.Register && info.OpRImm64 != BadOp)
{
int rexPrefix = GetRexPrefix(dest, source, type, rrm: false);
@ -972,7 +980,7 @@ namespace ARMeilleure.CodeGen.X86
WriteByte((byte)(info.OpRImm64 + (dest.GetRegister().Index & 0b111)));
if (_ptcInfo != null && source.Relocatable)
if (_ptcInfo != default && source.Relocatable)
{
_ptcInfo.WriteRelocEntry(new RelocEntry((int)_stream.Position, source.Symbol));
}
@ -986,11 +994,11 @@ namespace ARMeilleure.CodeGen.X86
}
else if (source.Kind == OperandKind.Register && info.OpRMR != BadOp)
{
WriteOpCode(dest, null, source, type, info.Flags, info.OpRMR);
WriteOpCode(dest, default, source, type, info.Flags, info.OpRMR);
}
else if (info.OpRRM != BadOp)
{
WriteOpCode(dest, null, source, type, info.Flags, info.OpRRM, rrm: true);
WriteOpCode(dest, default, source, type, info.Flags, info.OpRRM, rrm: true);
}
else
{
@ -999,11 +1007,11 @@ namespace ARMeilleure.CodeGen.X86
}
else if (info.OpRRM != BadOp)
{
WriteOpCode(dest, null, source, type, info.Flags, info.OpRRM, rrm: true);
WriteOpCode(dest, default, source, type, info.Flags, info.OpRRM, rrm: true);
}
else if (info.OpRMR != BadOp)
{
WriteOpCode(dest, null, source, type, info.Flags, info.OpRMR);
WriteOpCode(dest, default, source, type, info.Flags, info.OpRMR);
}
else
{
@ -1020,7 +1028,7 @@ namespace ARMeilleure.CodeGen.X86
{
InstructionInfo info = _instTable[(int)inst];
if (src2 != null)
if (src2 != default)
{
if (src2.Kind == OperandKind.Constant)
{
@ -1028,7 +1036,7 @@ namespace ARMeilleure.CodeGen.X86
if ((byte)imm == imm && info.OpRMImm8 != BadOp)
{
WriteOpCode(dest, src1, null, type, info.Flags, info.OpRMImm8);
WriteOpCode(dest, src1, default, type, info.Flags, info.OpRMImm8);
WriteByte((byte)imm);
}
@ -1082,9 +1090,10 @@ namespace ARMeilleure.CodeGen.X86
int modRM = (opCode >> OpModRMBits) << 3;
MemoryOperand memOp = null;
MemoryOperand memOp = default;
bool hasMemOp = false;
if (dest != null)
if (dest != default)
{
if (dest.Kind == OperandKind.Register)
{
@ -1099,7 +1108,8 @@ namespace ARMeilleure.CodeGen.X86
}
else if (dest.Kind == OperandKind.Memory)
{
memOp = dest as MemoryOperand;
memOp = dest.GetMemory();
hasMemOp = true;
}
else
{
@ -1107,7 +1117,7 @@ namespace ARMeilleure.CodeGen.X86
}
}
if (src2 != null)
if (src2 != default)
{
if (src2.Kind == OperandKind.Register)
{
@ -1120,9 +1130,10 @@ namespace ARMeilleure.CodeGen.X86
rexPrefix |= RexPrefix;
}
}
else if (src2.Kind == OperandKind.Memory && memOp == null)
else if (src2.Kind == OperandKind.Memory && !hasMemOp)
{
memOp = src2 as MemoryOperand;
memOp = src2.GetMemory();
hasMemOp = true;
}
else
{
@ -1135,14 +1146,14 @@ namespace ARMeilleure.CodeGen.X86
int sib = 0;
if (memOp != null)
if (hasMemOp)
{
// Either source or destination is a memory operand.
Register baseReg = memOp.BaseAddress.GetRegister();
X86Register baseRegLow = (X86Register)(baseReg.Index & 0b111);
needsSibByte = memOp.Index != null || baseRegLow == X86Register.Rsp;
needsSibByte = memOp.Index != default || baseRegLow == X86Register.Rsp;
needsDisplacement = memOp.Displacement != 0 || baseRegLow == X86Register.Rbp;
if (needsDisplacement)
@ -1168,7 +1179,7 @@ namespace ARMeilleure.CodeGen.X86
{
sib = (int)baseRegLow;
if (memOp.Index != null)
if (memOp.Index != default)
{
int indexReg = memOp.Index.GetRegister().Index;
@ -1217,7 +1228,7 @@ namespace ARMeilleure.CodeGen.X86
_ => 0
};
if (src1 != null)
if (src1 != default)
{
vexByte2 |= (src1.GetRegister().Index ^ 0xf) << 3;
}
@ -1284,7 +1295,7 @@ namespace ARMeilleure.CodeGen.X86
}
}
if (dest != null && (flags & InstructionFlags.RegOnly) != 0)
if (dest != default && (flags & InstructionFlags.RegOnly) != 0)
{
opCode += dest.GetRegister().Index & 7;
}
@ -1353,12 +1364,12 @@ namespace ARMeilleure.CodeGen.X86
}
}
if (dest != null && dest.Kind == OperandKind.Register)
if (dest != default && dest.Kind == OperandKind.Register)
{
SetRegisterHighBit(dest.GetRegister(), rrm ? 2 : 0);
}
if (source != null && source.Kind == OperandKind.Register)
if (source != default && source.Kind == OperandKind.Register)
{
SetRegisterHighBit(source.GetRegister(), rrm ? 0 : 2);
}


@ -11,8 +11,7 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Numerics;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
namespace ARMeilleure.CodeGen.X86
{
@ -162,20 +161,18 @@ namespace ARMeilleure.CodeGen.X86
{
context.EnterBlock(block);
for (Node node = block.Operations.First; node != null; node = node.ListNext)
for (Operation node = block.Operations.First; node != default; node = node.ListNext)
{
if (node is Operation operation)
{
GenerateOperation(context, operation);
}
GenerateOperation(context, node);
}
if (block.SuccessorCount == 0)
if (block.SuccessorsCount == 0)
{
// The only blocks which can have 0 successors are exit blocks.
Debug.Assert(block.Operations.Last is Operation operation &&
(operation.Instruction == Instruction.Tailcall ||
operation.Instruction == Instruction.Return));
Operation last = block.Operations.Last;
Debug.Assert(last.Instruction == Instruction.Tailcall ||
last.Instruction == Instruction.Return);
}
else
{
@ -205,9 +202,7 @@ namespace ARMeilleure.CodeGen.X86
{
if (operation.Instruction == Instruction.Extended)
{
IntrinsicOperation intrinOp = (IntrinsicOperation)operation;
IntrinsicInfo info = IntrinsicTable.GetInfo(intrinOp.Intrinsic);
IntrinsicInfo info = IntrinsicTable.GetInfo(operation.Intrinsic);
switch (info.Type)
{
@ -217,7 +212,7 @@ namespace ARMeilleure.CodeGen.X86
Operand src1 = operation.GetSource(0);
Operand src2 = operation.GetSource(1);
switch (intrinOp.Intrinsic)
switch (operation.Intrinsic)
{
case Intrinsic.X86Comisdeq:
context.Assembler.Comisd(src1, src2);
@ -266,14 +261,13 @@ namespace ARMeilleure.CodeGen.X86
int offs = offset.AsInt32() + context.CallArgsRegionSize;
Operand rsp = Register(X86Register.Rsp);
MemoryOperand memOp = MemoryOp(OperandType.I32, rsp, null, Multiplier.x1, offs);
Operand memOp = MemoryOp(OperandType.I32, rsp, default, Multiplier.x1, offs);
Debug.Assert(HardwareCapabilities.SupportsSse || HardwareCapabilities.SupportsVexEncoding);
context.Assembler.Stmxcsr(memOp);
if (intrinOp.Intrinsic == Intrinsic.X86Mxcsrmb)
if (operation.Intrinsic == Intrinsic.X86Mxcsrmb)
{
context.Assembler.Or(memOp, bits, OperandType.I32);
}
@ -324,7 +318,7 @@ namespace ARMeilleure.CodeGen.X86
Debug.Assert(dest.Type.IsInteger() && !source.Type.IsInteger());
if (intrinOp.Intrinsic == Intrinsic.X86Cvtsi2si)
if (operation.Intrinsic == Intrinsic.X86Cvtsi2si)
{
if (dest.Type == OperandType.I32)
{
@ -538,7 +532,7 @@ namespace ARMeilleure.CodeGen.X86
if (src2.Kind == OperandKind.Constant)
{
offset = src2.AsInt32();
index = null;
index = default;
}
else
{
@ -546,7 +540,7 @@ namespace ARMeilleure.CodeGen.X86
index = src2;
}
MemoryOperand memOp = MemoryOp(dest.Type, src1, index, Multiplier.x1, offset);
Operand memOp = MemoryOp(dest.Type, src1, index, Multiplier.x1, offset);
context.Assembler.Lea(dest, memOp, dest.Type);
}
@ -720,7 +714,7 @@ namespace ARMeilleure.CodeGen.X86
if (operation.SourcesCount == 5) // CompareAndSwap128 has 5 sources, compared to CompareAndSwap64/32's 3.
{
MemoryOperand memOp = MemoryOp(OperandType.I64, src1);
Operand memOp = MemoryOp(OperandType.I64, src1);
context.Assembler.Cmpxchg16b(memOp);
}
@ -731,7 +725,7 @@ namespace ARMeilleure.CodeGen.X86
EnsureSameType(src2, src3);
MemoryOperand memOp = MemoryOp(src3.Type, src1);
Operand memOp = MemoryOp(src3.Type, src1);
context.Assembler.Cmpxchg(memOp, src3);
}
@ -745,7 +739,7 @@ namespace ARMeilleure.CodeGen.X86
EnsureSameType(src2, src3);
MemoryOperand memOp = MemoryOp(src3.Type, src1);
Operand memOp = MemoryOp(src3.Type, src1);
context.Assembler.Cmpxchg16(memOp, src3);
}
@ -758,7 +752,7 @@ namespace ARMeilleure.CodeGen.X86
EnsureSameType(src2, src3);
MemoryOperand memOp = MemoryOp(src3.Type, src1);
Operand memOp = MemoryOp(src3.Type, src1);
context.Assembler.Cmpxchg8(memOp, src3);
}
@ -954,7 +948,7 @@ namespace ARMeilleure.CodeGen.X86
Operand rsp = Register(X86Register.Rsp);
MemoryOperand memOp = MemoryOp(dest.Type, rsp, null, Multiplier.x1, offs);
Operand memOp = MemoryOp(dest.Type, rsp, default, Multiplier.x1, offs);
GenerateLoad(context, memOp, dest);
}
@ -1153,7 +1147,7 @@ namespace ARMeilleure.CodeGen.X86
Operand rsp = Register(X86Register.Rsp);
MemoryOperand memOp = MemoryOp(source.Type, rsp, null, Multiplier.x1, offs);
Operand memOp = MemoryOp(source.Type, rsp, default, Multiplier.x1, offs);
GenerateStore(context, memOp, source);
}
@ -1169,7 +1163,7 @@ namespace ARMeilleure.CodeGen.X86
Operand rsp = Register(X86Register.Rsp);
MemoryOperand memOp = MemoryOp(OperandType.I64, rsp, null, Multiplier.x1, offs);
Operand memOp = MemoryOp(OperandType.I64, rsp, default, Multiplier.x1, offs);
context.Assembler.Lea(dest, memOp, OperandType.I64);
}
@ -1644,19 +1638,19 @@ namespace ARMeilleure.CodeGen.X86
context.Assembler.Pshufd(dest, dest, 0xfc);
}
private static bool MatchOperation(Node node, Instruction inst, OperandType destType, Register destReg)
private static bool MatchOperation(Operation node, Instruction inst, OperandType destType, Register destReg)
{
if (!(node is Operation operation) || node.DestinationsCount == 0)
if (node == default || node.DestinationsCount == 0)
{
return false;
}
if (operation.Instruction != inst)
if (node.Instruction != inst)
{
return false;
}
Operand dest = operation.Destination;
Operand dest = node.Destination;
return dest.Kind == OperandKind.Register &&
dest.Type == destType &&
@ -1761,7 +1755,7 @@ namespace ARMeilleure.CodeGen.X86
offset -= 16;
MemoryOperand memOp = MemoryOp(OperandType.V128, rsp, null, Multiplier.x1, offset);
Operand memOp = MemoryOp(OperandType.V128, rsp, default, Multiplier.x1, offset);
context.Assembler.Movdqu(memOp, Xmm((X86Register)bit));
@ -1791,7 +1785,7 @@ namespace ARMeilleure.CodeGen.X86
offset -= 16;
MemoryOperand memOp = MemoryOp(OperandType.V128, rsp, null, Multiplier.x1, offset);
Operand memOp = MemoryOp(OperandType.V128, rsp, default, Multiplier.x1, offset);
context.Assembler.Movdqu(Xmm((X86Register)bit), memOp);
@ -1832,17 +1826,17 @@ namespace ARMeilleure.CodeGen.X86
for (int offset = PageSize; offset < size; offset += PageSize)
{
Operand memOp = MemoryOp(OperandType.I32, rsp, null, Multiplier.x1, -offset);
Operand memOp = MemoryOp(OperandType.I32, rsp, default, Multiplier.x1, -offset);
context.Assembler.Mov(temp, memOp, OperandType.I32);
}
}
private static MemoryOperand Memory(Operand operand, OperandType type)
private static Operand Memory(Operand operand, OperandType type)
{
if (operand.Kind == OperandKind.Memory)
{
return operand as MemoryOperand;
return operand;
}
return MemoryOp(type, operand);
@ -1850,12 +1844,12 @@ namespace ARMeilleure.CodeGen.X86
private static Operand Register(X86Register register, OperandType type = OperandType.I64)
{
return OperandHelper.Register((int)register, RegisterType.Integer, type);
return Operand.Factory.Register((int)register, RegisterType.Integer, type);
}
private static Operand Xmm(X86Register register)
{
return OperandHelper.Register((int)register, RegisterType.Vector, OperandType.V128);
return Operand.Factory.Register((int)register, RegisterType.Vector, OperandType.V128);
}
}
}

File diff suppressed because it is too large.

@ -2,9 +2,8 @@
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;
using System.Collections.Generic;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;
using static ARMeilleure.IntermediateRepresentation.OperationHelper;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
using static ARMeilleure.IntermediateRepresentation.Operation.Factory;
namespace ARMeilleure.CodeGen.X86
{
@ -34,32 +33,27 @@ namespace ARMeilleure.CodeGen.X86
{
constants.Clear();
Node nextNode;
Operation nextNode;
for (Node node = block.Operations.First; node != null; node = nextNode)
for (Operation node = block.Operations.First; node != default; node = nextNode)
{
nextNode = node.ListNext;
if (!(node is Operation operation))
{
continue;
}
// Insert copies for constants that can't fit on a 32-bits immediate.
// Doing this early unblocks a few optimizations.
if (operation.Instruction == Instruction.Add)
if (node.Instruction == Instruction.Add)
{
Operand src1 = operation.GetSource(0);
Operand src2 = operation.GetSource(1);
Operand src1 = node.GetSource(0);
Operand src2 = node.GetSource(1);
if (src1.Kind == OperandKind.Constant && (src1.Relocatable || CodeGenCommon.IsLongConst(src1)))
{
operation.SetSource(0, GetConstantCopy(block, operation, src1));
node.SetSource(0, GetConstantCopy(block, node, src1));
}
if (src2.Kind == OperandKind.Constant && (src2.Relocatable || CodeGenCommon.IsLongConst(src2)))
{
operation.SetSource(1, GetConstantCopy(block, operation, src2));
node.SetSource(1, GetConstantCopy(block, node, src2));
}
}
@ -70,24 +64,24 @@ namespace ARMeilleure.CodeGen.X86
// mov rax, [rax]
// Into:
// mov rax, [rax+rbx*4+0xcafe]
if (IsMemoryLoadOrStore(operation.Instruction))
if (IsMemoryLoadOrStore(node.Instruction))
{
OperandType type;
if (operation.Destination != null)
if (node.Destination != default)
{
type = operation.Destination.Type;
type = node.Destination.Type;
}
else
{
type = operation.GetSource(1).Type;
type = node.GetSource(1).Type;
}
MemoryOperand memOp = GetMemoryOperandOrNull(operation.GetSource(0), type);
Operand memOp = GetMemoryOperandOrNull(node.GetSource(0), type);
if (memOp != null)
if (memOp != default)
{
operation.SetSource(0, memOp);
node.SetSource(0, memOp);
}
}
}
@ -96,7 +90,7 @@ namespace ARMeilleure.CodeGen.X86
Optimizer.RemoveUnusedNodes(cfg);
}
private static MemoryOperand GetMemoryOperandOrNull(Operand addr, OperandType type)
private static Operand GetMemoryOperandOrNull(Operand addr, OperandType type)
{
Operand baseOp = addr;
@ -117,10 +111,10 @@ namespace ARMeilleure.CodeGen.X86
// If baseOp is still equal to address, then there's nothing that can be optimized.
if (baseOp == addr)
{
return null;
return default;
}
if (imm == 0 && scale == Multiplier.x1 && indexOp != null)
if (imm == 0 && scale == Multiplier.x1 && indexOp != default)
{
imm = GetConstOp(ref indexOp);
}
@ -132,7 +126,7 @@ namespace ARMeilleure.CodeGen.X86
{
Operation operation = GetAsgOpWithInst(baseOp, Instruction.Add);
if (operation == null)
if (operation == default)
{
return 0;
}
@ -172,13 +166,13 @@ namespace ARMeilleure.CodeGen.X86
private static (Operand, Multiplier) GetIndexOp(ref Operand baseOp)
{
Operand indexOp = null;
Operand indexOp = default;
Multiplier scale = Multiplier.x1;
Operation addOp = GetAsgOpWithInst(baseOp, Instruction.Add);
if (addOp == null)
if (addOp == default)
{
return (indexOp, scale);
}
@ -198,14 +192,14 @@ namespace ARMeilleure.CodeGen.X86
bool indexOnSrc2 = false;
if (shlOp == null)
if (shlOp == default)
{
shlOp = GetAsgOpWithInst(src2, Instruction.ShiftLeft);
indexOnSrc2 = true;
}
if (shlOp != null)
if (shlOp != default)
{
Operand shSrc = shlOp.GetSource(0);
Operand shift = shlOp.GetSource(1);
@ -233,24 +227,19 @@ namespace ARMeilleure.CodeGen.X86
// If we have multiple assignments, folding is not safe
// as the value may be different depending on the
// control flow path.
if (op.Assignments.Count != 1)
if (op.AssignmentsCount != 1)
{
return null;
return default;
}
Node asgOp = op.Assignments[0];
Operation asgOp = op.Assignments[0];
if (!(asgOp is Operation operation))
if (asgOp.Instruction != inst)
{
return null;
return default;
}
if (operation.Instruction != inst)
{
return null;
}
return operation;
return asgOp;
}
private static bool IsMemoryLoadOrStore(Instruction inst)