package com.oracle.svm.core.graal.amd64;
import static com.oracle.svm.core.graal.code.SubstrateBackend.SubstrateMarkId.PROLOGUE_DECD_RSP;
import static com.oracle.svm.core.graal.code.SubstrateBackend.SubstrateMarkId.PROLOGUE_END;
import static com.oracle.svm.core.util.VMError.shouldNotReachHere;
import static com.oracle.svm.core.util.VMError.unimplemented;
import static jdk.vm.ci.amd64.AMD64.rax;
import static jdk.vm.ci.amd64.AMD64.rbp;
import static jdk.vm.ci.amd64.AMD64.rsp;
import static jdk.vm.ci.amd64.AMD64.xmm0;
import static jdk.vm.ci.code.ValueUtil.asRegister;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.REG;
import static org.graalvm.compiler.lir.LIRValueUtil.asConstantValue;
import static org.graalvm.compiler.lir.LIRValueUtil.differentRegisters;
import java.util.Collection;
import org.graalvm.compiler.asm.amd64.AMD64Address;
import org.graalvm.compiler.asm.amd64.AMD64Assembler;
import org.graalvm.compiler.asm.amd64.AMD64MacroAssembler;
import org.graalvm.compiler.code.CompilationResult;
import org.graalvm.compiler.core.amd64.AMD64ArithmeticLIRGenerator;
import org.graalvm.compiler.core.amd64.AMD64LIRGenerator;
import org.graalvm.compiler.core.amd64.AMD64LIRKindTool;
import org.graalvm.compiler.core.amd64.AMD64MoveFactory;
import org.graalvm.compiler.core.amd64.AMD64MoveFactoryBase;
import org.graalvm.compiler.core.amd64.AMD64MoveFactoryBase.BackupSlotProvider;
import org.graalvm.compiler.core.amd64.AMD64NodeLIRBuilder;
import org.graalvm.compiler.core.amd64.AMD64NodeMatchRules;
import org.graalvm.compiler.core.common.CompilationIdentifier;
import org.graalvm.compiler.core.common.CompressEncoding;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig;
import org.graalvm.compiler.core.common.spi.ForeignCallLinkage;
import org.graalvm.compiler.core.common.spi.LIRKindTool;
import org.graalvm.compiler.core.gen.DebugInfoBuilder;
import org.graalvm.compiler.core.gen.LIRGenerationProvider;
import org.graalvm.compiler.core.gen.NodeLIRBuilder;
import org.graalvm.compiler.debug.DebugContext;
import org.graalvm.compiler.lir.ConstantValue;
import org.graalvm.compiler.lir.LIR;
import org.graalvm.compiler.lir.LIRFrameState;
import org.graalvm.compiler.lir.LIRInstruction;
import org.graalvm.compiler.lir.LIRInstructionClass;
import org.graalvm.compiler.lir.LabelRef;
import org.graalvm.compiler.lir.Opcode;
import org.graalvm.compiler.lir.StandardOp.BlockEndOp;
import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.amd64.AMD64AddressValue;
import org.graalvm.compiler.lir.amd64.AMD64BreakpointOp;
import org.graalvm.compiler.lir.amd64.AMD64Call;
import org.graalvm.compiler.lir.amd64.AMD64ControlFlow.BranchOp;
import org.graalvm.compiler.lir.amd64.AMD64FrameMap;
import org.graalvm.compiler.lir.amd64.AMD64FrameMapBuilder;
import org.graalvm.compiler.lir.amd64.AMD64LIRInstruction;
import org.graalvm.compiler.lir.amd64.AMD64Move;
import org.graalvm.compiler.lir.amd64.AMD64Move.MoveFromConstOp;
import org.graalvm.compiler.lir.amd64.AMD64Move.PointerCompressionOp;
import org.graalvm.compiler.lir.amd64.AMD64PrefetchOp;
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
import org.graalvm.compiler.lir.asm.CompilationResultBuilderFactory;
import org.graalvm.compiler.lir.asm.DataBuilder;
import org.graalvm.compiler.lir.asm.FrameContext;
import org.graalvm.compiler.lir.framemap.FrameMap;
import org.graalvm.compiler.lir.framemap.FrameMapBuilder;
import org.graalvm.compiler.lir.framemap.FrameMapBuilderTool;
import org.graalvm.compiler.lir.framemap.ReferenceMapBuilder;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.lir.gen.LIRGeneratorTool;
import org.graalvm.compiler.nodes.BreakpointNode;
import org.graalvm.compiler.nodes.CallTargetNode;
import org.graalvm.compiler.nodes.DirectCallTargetNode;
import org.graalvm.compiler.nodes.IndirectCallTargetNode;
import org.graalvm.compiler.nodes.InvokeWithExceptionNode;
import org.graalvm.compiler.nodes.LogicNode;
import org.graalvm.compiler.nodes.NodeView;
import org.graalvm.compiler.nodes.SafepointNode;
import org.graalvm.compiler.nodes.StructuredGraph;
import org.graalvm.compiler.nodes.ValueNode;
import org.graalvm.compiler.nodes.spi.NodeLIRBuilderTool;
import org.graalvm.compiler.nodes.spi.NodeValueMap;
import org.graalvm.compiler.options.OptionValues;
import org.graalvm.compiler.phases.Phase;
import org.graalvm.compiler.phases.common.AddressLoweringPhase;
import org.graalvm.compiler.phases.util.Providers;
import org.graalvm.nativeimage.ImageSingletons;
import com.oracle.svm.core.FrameAccess;
import com.oracle.svm.core.ReservedRegisters;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.SubstrateUtil;
import com.oracle.svm.core.code.CodeInfoTable;
import com.oracle.svm.core.config.ConfigurationValues;
import com.oracle.svm.core.deopt.DeoptimizedFrame;
import com.oracle.svm.core.deopt.Deoptimizer;
import com.oracle.svm.core.graal.code.PatchConsumerFactory;
import com.oracle.svm.core.graal.code.SubstrateBackend;
import com.oracle.svm.core.graal.code.SubstrateCallingConvention;
import com.oracle.svm.core.graal.code.SubstrateCallingConventionType;
import com.oracle.svm.core.graal.code.SubstrateCompiledCode;
import com.oracle.svm.core.graal.code.SubstrateDataBuilder;
import com.oracle.svm.core.graal.code.SubstrateDebugInfoBuilder;
import com.oracle.svm.core.graal.code.SubstrateLIRGenerator;
import com.oracle.svm.core.graal.code.SubstrateNodeLIRBuilder;
import com.oracle.svm.core.graal.lir.VerificationMarkerOp;
import com.oracle.svm.core.graal.meta.RuntimeConfiguration;
import com.oracle.svm.core.graal.meta.SubstrateForeignCallLinkage;
import com.oracle.svm.core.graal.meta.SubstrateRegisterConfig;
import com.oracle.svm.core.graal.nodes.CGlobalDataLoadAddressNode;
import com.oracle.svm.core.heap.ReferenceAccess;
import com.oracle.svm.core.heap.SubstrateReferenceMapBuilder;
import com.oracle.svm.core.meta.CompressedNullConstant;
import com.oracle.svm.core.meta.SharedMethod;
import com.oracle.svm.core.meta.SubstrateObjectConstant;
import com.oracle.svm.core.nodes.SafepointCheckNode;
import com.oracle.svm.core.thread.VMThreads.StatusSupport;
import com.oracle.svm.core.util.VMError;
import jdk.vm.ci.amd64.AMD64;
import jdk.vm.ci.amd64.AMD64Kind;
import jdk.vm.ci.code.CallingConvention;
import jdk.vm.ci.code.CodeCacheProvider;
import jdk.vm.ci.code.CodeUtil;
import jdk.vm.ci.code.CompilationRequest;
import jdk.vm.ci.code.CompiledCode;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterConfig;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.code.ValueUtil;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.JavaType;
import jdk.vm.ci.meta.ResolvedJavaMethod;
import jdk.vm.ci.meta.Value;
public class SubstrateAMD64Backend extends SubstrateBackend implements LIRGenerationProvider {
    /** Returns the image-wide oop compression encoding registered in {@link ImageSingletons}. */
    protected static CompressEncoding getCompressEncoding() {
        return ImageSingletons.lookup(CompressEncoding.class);
    }
    /** Creates the AMD64 backend on top of the given providers; no extra state of its own. */
    public SubstrateAMD64Backend(Providers providers) {
        super(providers);
    }
    /**
     * Direct (PC-relative) call to a statically known method. Optionally publishes a
     * JavaFrameAnchor and a new thread status immediately before the call (see
     * {@link #maybeTransitionToNative}).
     */
    @Opcode("CALL_DIRECT")
    public static class SubstrateAMD64DirectCallOp extends AMD64Call.DirectCallOp {
        public static final LIRInstructionClass<SubstrateAMD64DirectCallOp> TYPE = LIRInstructionClass.create(SubstrateAMD64DirectCallOp.class);

        private final RuntimeConfiguration runtimeConfiguration;
        /** New thread status for a Java-to-native transition, or STATUS_ILLEGAL for no transition. */
        private final int newThreadStatus;
        /** JavaFrameAnchor to fill before the call; ILLEGAL when no transition is needed. */
        @Use({REG, OperandFlag.ILLEGAL}) private Value javaFrameAnchor;
        /** Scratch register used to materialize the last Java IP; ILLEGAL when no transition. */
        @Temp({REG, OperandFlag.ILLEGAL}) private Value javaFrameAnchorTemp;

        private final boolean destroysCallerSavedRegisters;
        /**
         * For invokes with an exception edge this is the Object return register, marked as killed
         * by the call (see {@code getExceptionTemp}); ILLEGAL otherwise.
         */
        @Temp({REG, OperandFlag.ILLEGAL}) private Value exceptionTemp;

        public SubstrateAMD64DirectCallOp(RuntimeConfiguration runtimeConfiguration, ResolvedJavaMethod callTarget, Value result, Value[] parameters, Value[] temps, LIRFrameState state,
                        Value javaFrameAnchor, Value javaFrameAnchorTemp, int newThreadStatus, boolean destroysCallerSavedRegisters, Value exceptionTemp) {
            super(TYPE, callTarget, result, parameters, temps, state);
            this.runtimeConfiguration = runtimeConfiguration;
            this.newThreadStatus = newThreadStatus;
            this.javaFrameAnchor = javaFrameAnchor;
            this.javaFrameAnchorTemp = javaFrameAnchorTemp;
            this.destroysCallerSavedRegisters = destroysCallerSavedRegisters;
            this.exceptionTemp = exceptionTemp;
            // The anchor registers must not alias any argument or temp register.
            assert differentRegisters(parameters, temps, javaFrameAnchor, javaFrameAnchorTemp);
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AMD64MacroAssembler masm) {
            // Publish IP/SP (and optionally the thread status) right before calling out.
            maybeTransitionToNative(crb, masm, runtimeConfiguration, javaFrameAnchor, javaFrameAnchorTemp, state, newThreadStatus);
            AMD64Call.directCall(crb, masm, callTarget, null, false, state);
        }

        @Override
        public boolean destroysCallerSavedRegisters() {
            return destroysCallerSavedRegisters;
        }
    }
    /**
     * Indirect call through a register holding the target address. Otherwise identical to
     * {@link SubstrateAMD64DirectCallOp}, including the optional Java-to-native transition.
     */
    @Opcode("CALL_INDIRECT")
    public static class SubstrateAMD64IndirectCallOp extends AMD64Call.IndirectCallOp {
        public static final LIRInstructionClass<SubstrateAMD64IndirectCallOp> TYPE = LIRInstructionClass.create(SubstrateAMD64IndirectCallOp.class);

        private final RuntimeConfiguration runtimeConfiguration;
        /** New thread status for a Java-to-native transition, or STATUS_ILLEGAL for no transition. */
        private final int newThreadStatus;
        /** JavaFrameAnchor to fill before the call; ILLEGAL when no transition is needed. */
        @Use({REG, OperandFlag.ILLEGAL}) private Value javaFrameAnchor;
        /** Scratch register used to materialize the last Java IP; ILLEGAL when no transition. */
        @Temp({REG, OperandFlag.ILLEGAL}) private Value javaFrameAnchorTemp;

        private final boolean destroysCallerSavedRegisters;
        /**
         * For invokes with an exception edge this is the Object return register, marked as killed
         * by the call (see {@code getExceptionTemp}); ILLEGAL otherwise.
         */
        @Temp({REG, OperandFlag.ILLEGAL}) private Value exceptionTemp;

        public SubstrateAMD64IndirectCallOp(RuntimeConfiguration runtimeConfiguration, ResolvedJavaMethod callTarget, Value result, Value[] parameters, Value[] temps, Value targetAddress,
                        LIRFrameState state, Value javaFrameAnchor, Value javaFrameAnchorTemp, int newThreadStatus, boolean destroysCallerSavedRegisters, Value exceptionTemp) {
            super(TYPE, callTarget, result, parameters, temps, targetAddress, state);
            this.runtimeConfiguration = runtimeConfiguration;
            this.newThreadStatus = newThreadStatus;
            this.javaFrameAnchor = javaFrameAnchor;
            this.javaFrameAnchorTemp = javaFrameAnchorTemp;
            this.destroysCallerSavedRegisters = destroysCallerSavedRegisters;
            this.exceptionTemp = exceptionTemp;
            // The target address and anchor registers must not alias arguments or temps.
            assert differentRegisters(parameters, temps, targetAddress, javaFrameAnchor, javaFrameAnchorTemp);
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AMD64MacroAssembler masm) {
            // Publish IP/SP (and optionally the thread status) right before calling out.
            maybeTransitionToNative(crb, masm, runtimeConfiguration, javaFrameAnchor, javaFrameAnchorTemp, state, newThreadStatus);
            AMD64Call.indirectCall(crb, masm, asRegister(targetAddress), callTarget, state);
        }

        @Override
        public boolean destroysCallerSavedRegisters() {
            return destroysCallerSavedRegisters;
        }
    }
    /**
     * If {@code javaFrameAnchor} is a real register, stores the current instruction pointer and
     * stack pointer into the JavaFrameAnchor it points to, and in multi-threaded mode publishes
     * {@code newThreadStatus} into the thread's status field. Emits nothing when the anchor is
     * ILLEGAL. Intended to be emitted immediately before the call it protects.
     */
    static void maybeTransitionToNative(CompilationResultBuilder crb, AMD64MacroAssembler masm, RuntimeConfiguration runtimeConfiguration, Value javaFrameAnchor, Value temp, LIRFrameState state,
                    int newThreadStatus) {
        if (ValueUtil.isIllegal(javaFrameAnchor)) {
            // No transition requested: the status must be ILLEGAL as well.
            assert newThreadStatus == StatusSupport.STATUS_ILLEGAL;
            return;
        }
        assert StatusSupport.isValidStatus(newThreadStatus);

        Register anchor = ValueUtil.asRegister(javaFrameAnchor);
        Register lastJavaIP = ValueUtil.asRegister(temp);
        // leaq with a RIP-relative displacement of 0 materializes the address of the next
        // instruction; the span is recorded as a call site so 'state' is associated with it.
        // NOTE(review): the exact contract of recordIndirectCall here should be confirmed against
        // CompilationResultBuilder's documentation.
        int startPos = masm.position();
        masm.leaq(lastJavaIP, new AMD64Address(AMD64.rip));
        crb.recordIndirectCall(startPos, masm.position(), null, state);
        masm.movq(new AMD64Address(anchor, runtimeConfiguration.getJavaFrameAnchorLastIPOffset()), lastJavaIP);
        masm.movq(new AMD64Address(anchor, runtimeConfiguration.getJavaFrameAnchorLastSPOffset()), AMD64.rsp);

        if (SubstrateOptions.MultiThreaded.getValue()) {
            // Status lives per-thread; only relevant (and only written) when multi-threaded.
            masm.movl(new AMD64Address(ReservedRegisters.singleton().getThreadRegister(), runtimeConfiguration.getVMThreadStatusOffset()), newThreadStatus);
        }
    }
@Opcode("DEAD_END")
public static class DeadEndOp extends LIRInstruction implements BlockEndOp {
public static final LIRInstructionClass<DeadEndOp> TYPE = LIRInstructionClass.create(DeadEndOp.class);
public DeadEndOp() {
super(TYPE);
}
@Override
public void emitCode(CompilationResultBuilder crb) {
if (SubstrateUtil.assertionsEnabled()) {
((AMD64Assembler) crb.asm).int3();
}
}
}
protected static final class SubstrateLIRGenerationResult extends LIRGenerationResult {
private final SharedMethod method;
public SubstrateLIRGenerationResult(CompilationIdentifier compilationId, LIR lir, FrameMapBuilder frameMapBuilder, CallingConvention callingConvention,
RegisterAllocationConfig registerAllocationConfig, SharedMethod method) {
super(compilationId, lir, frameMapBuilder, registerAllocationConfig, callingConvention);
this.method = method;
if (method.hasCalleeSavedRegisters()) {
AMD64CalleeSavedRegisters calleeSavedRegisters = AMD64CalleeSavedRegisters.singleton();
FrameMap frameMap = ((FrameMapBuilderTool) frameMapBuilder).getFrameMap();
int registerSaveAreaSizeInBytes = method.hasCalleeSavedRegisters() ? calleeSavedRegisters.getSaveAreaSize() : 0;
StackSlot calleeSaveArea = frameMap.allocateStackSlots(registerSaveAreaSizeInBytes / frameMap.getTarget().wordSize);
calleeSavedRegisters.verifySaveAreaOffsetInFrame(calleeSaveArea.getRawOffset());
}
if (method.canDeoptimize() || method.isDeoptTarget()) {
((FrameMapBuilderTool) frameMapBuilder).getFrameMap().reserveOutgoing(16);
}
}
public SharedMethod getMethod() {
return method;
}
}
    /**
     * Substrate-specific AMD64 LIR generator: foreign calls routed through the image code cache
     * when only indirect calls are allowed, Substrate pointer (de)compression, and Substrate-only
     * ops such as verification markers, far returns, and dead ends.
     */
    protected class SubstrateAMD64LIRGenerator extends AMD64LIRGenerator implements SubstrateLIRGenerator {

        public SubstrateAMD64LIRGenerator(LIRKindTool lirKindTool, AMD64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, Providers providers, LIRGenerationResult lirGenRes) {
            super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes);
        }

        @Override
        public SubstrateLIRGenerationResult getResult() {
            return (SubstrateLIRGenerationResult) super.getResult();
        }

        @Override
        public SubstrateRegisterConfig getRegisterConfig() {
            return (SubstrateRegisterConfig) super.getRegisterConfig();
        }

        /**
         * Whether a call to {@code targetMethod} must be treated as destroying all caller-saved
         * registers: always true inside deopt target methods, and true for unknown targets or
         * targets without callee-saved registers.
         */
        protected boolean getDestroysCallerSavedRegisters(ResolvedJavaMethod targetMethod) {
            if (getResult().getMethod().isDeoptTarget()) {
                return true;
            }
            return targetMethod == null || !((SharedMethod) targetMethod).hasCalleeSavedRegisters();
        }

        /**
         * When only indirect calls are allowed, computes the absolute address of the foreign-call
         * target as (image code start) + (target's code offset in the image); returns null when
         * direct calls may be emitted.
         */
        @Override
        protected Value emitIndirectForeignCallAddress(ForeignCallLinkage linkage) {
            if (!shouldEmitOnlyIndirectCalls()) {
                return null;
            }

            SubstrateForeignCallLinkage callTarget = (SubstrateForeignCallLinkage) linkage;
            SharedMethod targetMethod = (SharedMethod) callTarget.getMethod();

            // codeStart is loaded from the image code-info object; the target's offset is a
            // compile-time constant.
            Value codeOffsetInImage = emitConstant(getLIRKindTool().getWordKind(), JavaConstant.forInt(targetMethod.getCodeOffsetInImage()));
            Value codeInfo = emitJavaConstant(SubstrateObjectConstant.forObject(CodeInfoTable.getImageCodeCache()));
            Value codeStartField = new AMD64AddressValue(getLIRKindTool().getWordKind(), asAllocatable(codeInfo), getRuntimeConfiguration().getImageCodeInfoCodeStartOffset());
            Value codeStart = getArithmetic().emitLoad(getLIRKindTool().getWordKind(), codeStartField, null);
            return getArithmetic().emitAdd(codeStart, codeOffsetInImage, false);
        }

        /**
         * Emits the foreign call itself, either through rax (indirect) or as a direct call. No
         * frame anchor is set up: foreign calls here never transition the thread status.
         */
        @Override
        protected void emitForeignCallOp(ForeignCallLinkage linkage, Value targetAddress, Value result, Value[] arguments, Value[] temps, LIRFrameState info) {
            SubstrateForeignCallLinkage callTarget = (SubstrateForeignCallLinkage) linkage;
            SharedMethod targetMethod = (SharedMethod) callTarget.getMethod();

            if (shouldEmitOnlyIndirectCalls()) {
                // Pin the computed target address into rax for the indirect call.
                AllocatableValue targetRegister = AMD64.rax.asValue(FrameAccess.getWordStamp().getLIRKind(getLIRKindTool()));
                emitMove(targetRegister, targetAddress);
                append(new SubstrateAMD64IndirectCallOp(getRuntimeConfiguration(), targetMethod, result, arguments, temps, targetRegister, info,
                                Value.ILLEGAL, Value.ILLEGAL, StatusSupport.STATUS_ILLEGAL, getDestroysCallerSavedRegisters(targetMethod), Value.ILLEGAL));
            } else {
                // Direct calls take no target address operand.
                assert targetAddress == null;
                append(new SubstrateAMD64DirectCallOp(getRuntimeConfiguration(), targetMethod, result, arguments, temps, info, Value.ILLEGAL,
                                Value.ILLEGAL, StatusSupport.STATUS_ILLEGAL, getDestroysCallerSavedRegisters(targetMethod), Value.ILLEGAL));
            }
        }

        @Override
        public void emitUnwind(Value operand) {
            // Exception unwinding is lowered earlier; it must never reach LIR generation.
            throw shouldNotReachHere("handled by lowering");
        }

        @Override
        public void emitDeoptimize(Value actionAndReason, Value failedSpeculation, LIRFrameState state) {
            throw shouldNotReachHere("Substrate VM does not use deoptimization");
        }

        @Override
        public void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args, int numberOfFloatingPointArguments) {
            throw unimplemented();
        }

        /** Inserts a no-op marker instruction used only for verification of the LIR. */
        @Override
        public void emitVerificationMarker(Object marker) {
            append(new VerificationMarkerOp(marker));
        }

        @Override
        public void emitInstructionSynchronizationBarrier() {
            // x86 has coherent instruction caches; no barrier instruction is required.
            throw shouldNotReachHere("AMD64 does not need instruction synchronization");
        }

        /** Returns to an arbitrary (sp, ip) pair instead of the caller's return address. */
        @Override
        public void emitFarReturn(AllocatableValue result, Value sp, Value ip, boolean fromMethodWithCalleeSavedRegisters) {
            append(new AMD64FarReturnOp(result, asAllocatable(sp), asAllocatable(ip), fromMethodWithCalleeSavedRegisters));
        }

        @Override
        public void emitDeadEnd() {
            append(new DeadEndOp());
        }

        @Override
        public void emitPrefetchAllocate(Value address) {
            append(new AMD64PrefetchOp(asAddressValue(address), SubstrateOptions.AllocatePrefetchInstr.getValue()));
        }

        /**
         * Compresses an object pointer into a narrow oop. With linear pointer compression the
         * value is treated as non-null regardless of the caller's hint.
         */
        @Override
        public Value emitCompress(Value pointer, CompressEncoding encoding, boolean isNonNull) {
            Variable result = newVariable(getLIRKindTool().getNarrowOopKind());
            boolean nonNull = useLinearPointerCompression() || isNonNull;
            append(new AMD64Move.CompressPointerOp(result, asAllocatable(pointer), ReservedRegisters.singleton().getHeapBaseRegister().asValue(), encoding, nonNull, getLIRKindTool()));
            return result;
        }

        /** Inverse of {@link #emitCompress}: expands a narrow oop into a full object pointer. */
        @Override
        public Value emitUncompress(Value pointer, CompressEncoding encoding, boolean isNonNull) {
            assert pointer.getValueKind(LIRKind.class).getPlatformKind() == getLIRKindTool().getNarrowOopKind().getPlatformKind();
            Variable result = newVariable(getLIRKindTool().getObjectKind());
            boolean nonNull = useLinearPointerCompression() || isNonNull;
            append(new AMD64Move.UncompressPointerOp(result, asAllocatable(pointer), ReservedRegisters.singleton().getHeapBaseRegister().asValue(), encoding, nonNull, getLIRKindTool()));
            return result;
        }

        /**
         * With linear pointer compression, the heap-base register encodes null; this converts the
         * encoded null back to the numeric value 0. Without compression it is a plain move.
         */
        @Override
        public void emitConvertNullToZero(AllocatableValue result, Value value) {
            if (useLinearPointerCompression()) {
                append(new AMD64Move.ConvertNullToZeroOp(result, (AllocatableValue) value));
            } else {
                emitMove(result, value);
            }
        }

        /** Inverse of {@link #emitConvertNullToZero}. */
        @Override
        public void emitConvertZeroToNull(AllocatableValue result, Value value) {
            if (useLinearPointerCompression()) {
                append(new AMD64Move.ConvertZeroToNullOp(result, (AllocatableValue) value));
            } else {
                emitMove(result, value);
            }
        }
    }
    /**
     * Node-to-LIR builder with Substrate-specific call emission (frame anchors, thread status,
     * callee-saved calling conventions) and handling of Substrate nodes such as safepoint checks
     * and CGlobalData address loads.
     */
    public final class SubstrateAMD64NodeLIRBuilder extends AMD64NodeLIRBuilder implements SubstrateNodeLIRBuilder {

        public SubstrateAMD64NodeLIRBuilder(StructuredGraph graph, LIRGeneratorTool gen, AMD64NodeMatchRules nodeMatchRules) {
            super(graph, gen, nodeMatchRules);
        }

        @Override
        public void visitSafepointNode(SafepointNode node) {
            // Safepoints are lowered before LIR generation; reaching here is a bug.
            throw shouldNotReachHere("handled by lowering");
        }

        /** Emits a breakpoint op whose operands follow the Java calling convention. */
        @Override
        public void visitBreakpointNode(BreakpointNode node) {
            JavaType[] sig = new JavaType[node.arguments().size()];
            for (int i = 0; i < sig.length; i++) {
                sig[i] = node.arguments().get(i).stamp(NodeView.DEFAULT).javaType(gen.getMetaAccess());
            }

            CallingConvention convention = gen.getRegisterConfig().getCallingConvention(SubstrateCallingConventionType.JavaCall, null, sig, gen);
            append(new AMD64BreakpointOp(visitInvokeArguments(convention, node.arguments())));
        }

        @Override
        protected DebugInfoBuilder createDebugInfoBuilder(StructuredGraph graph, NodeValueMap nodeValueMap) {
            return new SubstrateDebugInfoBuilder(graph, gen.getProviders().getMetaAccessExtensionProvider(), nodeValueMap);
        }

        /**
         * Moves arguments into position. For native-ABI calls, additionally loads the number of
         * XMM argument registers into rax, as the System V AMD64 ABI requires %al to hold an
         * upper bound of the vector registers used by a varargs call.
         */
        @Override
        public Value[] visitInvokeArguments(CallingConvention invokeCc, Collection<ValueNode> arguments) {
            Value[] values = super.visitInvokeArguments(invokeCc, arguments);

            SubstrateCallingConventionType type = (SubstrateCallingConventionType) ((SubstrateCallingConvention) invokeCc).getType();
            if (type.nativeABI) {
                // Count how many arguments were assigned to XMM registers.
                int xmmCount = 0;
                for (Value v : values) {
                    if (isRegister(v) && asRegister(v).getRegisterCategory().equals(AMD64.XMM)) {
                        xmmCount++;
                    }
                }
                assert xmmCount <= 8;
                AllocatableValue xmmCountRegister = AMD64.rax.asValue(LIRKind.value(AMD64Kind.DWORD));
                gen.emitMoveConstant(xmmCountRegister, JavaConstant.forInt(xmmCount));
            }
            return values;
        }

        private boolean getDestroysCallerSavedRegisters(ResolvedJavaMethod targetMethod) {
            return ((SubstrateAMD64LIRGenerator) gen).getDestroysCallerSavedRegisters(targetMethod);
        }

        /**
         * For invokes with an exception edge, the Object return register must be reserved across
         * the call (it carries the exception object on the exceptional path); ILLEGAL otherwise.
         */
        private Value getExceptionTemp(CallTargetNode callTarget) {
            if (callTarget.invoke() instanceof InvokeWithExceptionNode) {
                return gen.getRegisterConfig().getReturnRegister(JavaKind.Object).asValue();
            } else {
                return Value.ILLEGAL;
            }
        }

        @Override
        protected void emitDirectCall(DirectCallTargetNode callTarget, Value result, Value[] parameters, Value[] temps, LIRFrameState callState) {
            ResolvedJavaMethod targetMethod = callTarget.targetMethod();
            append(new SubstrateAMD64DirectCallOp(getRuntimeConfiguration(), targetMethod, result, parameters, temps, callState,
                            setupJavaFrameAnchor(callTarget), setupJavaFrameAnchorTemp(callTarget), getNewThreadStatus(callTarget),
                            getDestroysCallerSavedRegisters(targetMethod), getExceptionTemp(callTarget)));
        }

        @Override
        protected void emitIndirectCall(IndirectCallTargetNode callTarget, Value result, Value[] parameters, Value[] temps, LIRFrameState callState) {
            // rax by default; native-ABI calls use r10 instead (rax holds the XMM count, see
            // visitInvokeArguments).
            Register targetRegister = AMD64.rax;
            if (((SubstrateCallingConventionType) callTarget.callType()).nativeABI) {
                targetRegister = AMD64.r10;
            }
            AllocatableValue targetAddress = targetRegister.asValue(FrameAccess.getWordStamp().getLIRKind(getLIRGeneratorTool().getLIRKindTool()));
            gen.emitMove(targetAddress, operand(callTarget.computedAddress()));
            ResolvedJavaMethod targetMethod = callTarget.targetMethod();
            append(new SubstrateAMD64IndirectCallOp(getRuntimeConfiguration(), targetMethod, result, parameters, temps, targetAddress, callState,
                            setupJavaFrameAnchor(callTarget), setupJavaFrameAnchorTemp(callTarget), getNewThreadStatus(callTarget),
                            getDestroysCallerSavedRegisters(targetMethod), getExceptionTemp(callTarget)));
        }

        /** Pins the JavaFrameAnchor into r13, or returns ILLEGAL when the call has no anchor. */
        private AllocatableValue setupJavaFrameAnchor(CallTargetNode callTarget) {
            if (!hasJavaFrameAnchor(callTarget)) {
                return Value.ILLEGAL;
            }
            // Fixed register so the call op can reference it without register-allocator input.
            Register frameAnchorRegister = AMD64.r13;
            AllocatableValue frameAnchor = frameAnchorRegister.asValue(FrameAccess.getWordStamp().getLIRKind(getLIRGeneratorTool().getLIRKindTool()));
            gen.emitMove(frameAnchor, operand(getJavaFrameAnchor(callTarget)));
            return frameAnchor;
        }

        /** Fixed scratch register (r12) paired with the anchor, or ILLEGAL when no anchor. */
        private AllocatableValue setupJavaFrameAnchorTemp(CallTargetNode callTarget) {
            if (!hasJavaFrameAnchor(callTarget)) {
                return Value.ILLEGAL;
            }
            return AMD64.r12.asValue(FrameAccess.getWordStamp().getLIRKind(getLIRGeneratorTool().getLIRKindTool()));
        }

        /** Safepoint checks emit a dedicated check op followed by a conditional branch. */
        @Override
        public void emitBranch(LogicNode node, LabelRef trueSuccessor, LabelRef falseSuccessor, double trueSuccessorProbability) {
            if (node instanceof SafepointCheckNode) {
                AMD64SafepointCheckOp op = new AMD64SafepointCheckOp();
                append(op);
                append(new BranchOp(op.getConditionFlag(), trueSuccessor, falseSuccessor, trueSuccessorProbability));
            } else {
                super.emitBranch(node, trueSuccessor, falseSuccessor, trueSuccessorProbability);
            }
        }

        @Override
        public void emitCGlobalDataLoadAddress(CGlobalDataLoadAddressNode node) {
            Variable result = gen.newVariable(gen.getLIRKindTool().getWordKind());
            append(new AMD64CGlobalDataLoadAddressOp(node.getDataInfo(), result));
            setResult(node, result);
        }

        /**
         * Loads the caller's return address from its stack slot.
         * NOTE(review): the negative offset addresses the slot just below the frame start —
         * confirm against the frame layout documentation.
         */
        @Override
        public Variable emitReadReturnAddress() {
            assert FrameAccess.returnAddressSize() > 0;
            return getLIRGeneratorTool().emitMove(StackSlot.get(getLIRGeneratorTool().getLIRKind(FrameAccess.getWordStamp()), -FrameAccess.returnAddressSize(), true));
        }
    }
    /**
     * Standard prologue/epilogue for Substrate VM methods: optionally establishes rbp as a base
     * pointer, then allocates the fixed-size frame by decrementing rsp. SubstrateMarkIds are
     * recorded to delimit prologue and epilogue in the compilation result.
     */
    protected static class SubstrateAMD64FrameContext implements FrameContext {

        @Override
        public void enter(CompilationResultBuilder tasm) {
            AMD64MacroAssembler asm = (AMD64MacroAssembler) tasm.asm;
            int frameSize = tasm.frameMap.frameSize();

            if (((SubstrateAMD64RegisterConfig) tasm.frameMap.getRegisterConfig()).shouldUseBasePointer()) {
                // Conventional frame linkage: save caller's rbp, then point rbp at our frame.
                asm.push(rbp);
                asm.movq(rbp, rsp);
            }

            asm.decrementq(rsp, frameSize);
            tasm.recordMark(PROLOGUE_DECD_RSP);
            tasm.recordMark(PROLOGUE_END);
        }

        @Override
        public void leave(CompilationResultBuilder tasm) {
            AMD64MacroAssembler asm = (AMD64MacroAssembler) tasm.asm;
            int frameSize = tasm.frameMap.frameSize();

            tasm.recordMark(SubstrateMarkId.EPILOGUE_START);
            if (((SubstrateAMD64RegisterConfig) tasm.frameMap.getRegisterConfig()).shouldUseBasePointer()) {
                // With a base pointer, rsp is restored from rbp instead of by adding frameSize.
                asm.movq(rsp, rbp);
                asm.pop(rbp);
            } else {
                asm.incrementq(rsp, frameSize);
            }
            tasm.recordMark(SubstrateMarkId.EPILOGUE_INCD_RSP);
        }

        @Override
        public void returned(CompilationResultBuilder crb) {
            crb.recordMark(SubstrateMarkId.EPILOGUE_END);
        }

        @Override
        public boolean hasFrame() {
            return true;
        }
    }
static class AMD64StubCallingConventionSubstrateFrameContext extends SubstrateAMD64FrameContext {
private final JavaKind returnKind;
AMD64StubCallingConventionSubstrateFrameContext(JavaKind returnKind) {
this.returnKind = returnKind;
}
@Override
public void enter(CompilationResultBuilder crb) {
super.enter(crb);
AMD64CalleeSavedRegisters.singleton().emitSave((AMD64MacroAssembler) crb.asm, crb.frameMap.totalFrameSize());
}
@Override
public void leave(CompilationResultBuilder crb) {
Register returnRegister = null;
if (returnKind != JavaKind.Void) {
returnRegister = crb.frameMap.getRegisterConfig().getReturnRegister(returnKind);
}
AMD64CalleeSavedRegisters.singleton().emitRestore((AMD64MacroAssembler) crb.asm, crb.frameMap.totalFrameSize(), returnRegister);
super.leave(crb);
}
}
    /**
     * Prologue for the deoptimization entry stub: before the regular prologue, loads the
     * DeoptimizedFrame reference from the top of the stack into the first Java-call parameter
     * register, and stashes both return registers (integer and floating point) into the frame's
     * scratch space so they survive the upcoming work.
     */
    protected static class DeoptEntryStubContext extends SubstrateAMD64FrameContext {
        @Override
        public void enter(CompilationResultBuilder tasm) {
            AMD64MacroAssembler asm = (AMD64MacroAssembler) tasm.asm;
            RegisterConfig registerConfig = tasm.frameMap.getRegisterConfig();

            // DeoptimizedFrame was pushed at [rsp + 0]; move it into the first argument register.
            Register deoptimizedFrame = registerConfig.getCallingConventionRegisters(SubstrateCallingConventionType.JavaCall, tasm.target.wordJavaKind).get(0);
            asm.movq(deoptimizedFrame, new AMD64Address(registerConfig.getFrameRegister(), 0));

            // Save both possible return values: long at scratch+0, double at scratch+8.
            int scratchOffset = DeoptimizedFrame.getScratchSpaceOffset();
            asm.movq(new AMD64Address(deoptimizedFrame, scratchOffset), registerConfig.getReturnRegister(JavaKind.Long));
            asm.movq(new AMD64Address(deoptimizedFrame, scratchOffset + 8), registerConfig.getReturnRegister(JavaKind.Double));

            super.enter(tasm);
        }
    }
    /**
     * Frame context of the deoptimization exit stub: on entry it repositions rsp from the first
     * parameter register (minus the return-address slot); on exit, after the regular epilogue, it
     * reloads the saved return values from the scratch space addressed through rax.
     */
    protected static class DeoptExitStubContext extends SubstrateAMD64FrameContext {
        @Override
        public void enter(CompilationResultBuilder tasm) {
            AMD64MacroAssembler asm = (AMD64MacroAssembler) tasm.asm;

            // The first argument carries the new stack pointer; step below the return-address
            // slot before the regular prologue runs.
            Register firstParameter = tasm.frameMap.getRegisterConfig().getCallingConventionRegisters(SubstrateCallingConventionType.JavaCall, tasm.target.wordJavaKind).get(0);
            asm.movq(rsp, firstParameter);
            asm.subq(rsp, FrameAccess.returnAddressSize());

            super.enter(tasm);
        }

        @Override
        public void leave(CompilationResultBuilder tasm) {
            AMD64MacroAssembler asm = (AMD64MacroAssembler) tasm.asm;

            super.leave(tasm);

            // xmm0 must be restored first: the second movq overwrites rax, which is the base
            // register for both loads.
            int scratchOffset = DeoptimizedFrame.getScratchSpaceOffset();
            asm.movq(xmm0, new AMD64Address(rax, scratchOffset + 8));
            asm.movq(rax, new AMD64Address(rax, scratchOffset));
        }
    }
static class SubstrateReferenceMapBuilderFactory implements FrameMap.ReferenceMapBuilderFactory {
@Override
public ReferenceMapBuilder newReferenceMapBuilder(int totalFrameSize) {
return new SubstrateReferenceMapBuilder(totalFrameSize);
}
}
    /**
     * Move factory aware of Substrate object constants: null is loaded as the integer 0, and
     * object constants are loaded (possibly compressed) via {@link LoadCompressedObjectConstantOp}
     * when compressed references are in use.
     */
    protected static class SubstrateAMD64MoveFactory extends AMD64MoveFactory {

        private final SharedMethod method;
        protected final LIRKindTool lirKindTool;

        protected SubstrateAMD64MoveFactory(BackupSlotProvider backupSlotProvider, SharedMethod method, LIRKindTool lirKindTool) {
            super(backupSlotProvider);
            this.method = method;
            this.lirKindTool = lirKindTool;
        }

        @Override
        public boolean allowConstantToStackMove(Constant constant) {
            // Object constants are not moved directly to the stack in deopt target methods.
            // NOTE(review): presumably because the deopt machinery must see them in registers /
            // via proper loads — confirm.
            if (constant instanceof SubstrateObjectConstant && method.isDeoptTarget()) {
                return false;
            }
            return super.allowConstantToStackMove(constant);
        }

        @Override
        public AMD64LIRInstruction createLoad(AllocatableValue dst, Constant src) {
            if (CompressedNullConstant.COMPRESSED_NULL.equals(src)) {
                // Compressed null is simply the integer 0.
                return super.createLoad(dst, JavaConstant.INT_0);
            } else if (src instanceof SubstrateObjectConstant) {
                return loadObjectConstant(dst, (SubstrateObjectConstant) src);
            }
            return super.createLoad(dst, src);
        }

        @Override
        public LIRInstruction createStackLoad(AllocatableValue dst, Constant src) {
            if (CompressedNullConstant.COMPRESSED_NULL.equals(src)) {
                return super.createStackLoad(dst, JavaConstant.INT_0);
            } else if (src instanceof SubstrateObjectConstant) {
                return loadObjectConstant(dst, (SubstrateObjectConstant) src);
            }
            return super.createStackLoad(dst, src);
        }

        /** Loads an object constant, using the compressed-reference path when enabled. */
        protected AMD64LIRInstruction loadObjectConstant(AllocatableValue dst, SubstrateObjectConstant constant) {
            if (ReferenceAccess.singleton().haveCompressedReferences()) {
                RegisterValue heapBase = ReservedRegisters.singleton().getHeapBaseRegister().asValue();
                return new LoadCompressedObjectConstantOp(dst, constant, heapBase, getCompressEncoding(), lirKindTool);
            }
            return new MoveFromConstOp(dst, constant);
        }

        /**
         * Loads a (compressed) object constant, either inlined into the code stream or from a
         * data section entry, and uncompresses it afterwards when the requested constant is not
         * itself compressed.
         */
        public static final class LoadCompressedObjectConstantOp extends PointerCompressionOp implements LoadConstantOp {
            public static final LIRInstructionClass<LoadCompressedObjectConstantOp> TYPE = LIRInstructionClass.create(LoadCompressedObjectConstantOp.class);

            private final SubstrateObjectConstant constant;

            static JavaConstant asCompressed(SubstrateObjectConstant constant) {
                // We only want compressed references in code.
                return constant.isCompressed() ? constant : constant.compress();
            }

            LoadCompressedObjectConstantOp(AllocatableValue result, SubstrateObjectConstant constant, AllocatableValue baseRegister, CompressEncoding encoding, LIRKindTool lirKindTool) {
                super(TYPE, result, new ConstantValue(lirKindTool.getNarrowOopKind(), asCompressed(constant)), baseRegister, encoding, true, lirKindTool);
                this.constant = constant;
            }

            @Override
            public Constant getConstant() {
                return constant;
            }

            @Override
            public void emitCode(CompilationResultBuilder crb, AMD64MacroAssembler masm) {
                Register resultReg = getResultRegister();
                int referenceSize = ConfigurationValues.getObjectLayout().getReferenceSize();
                Constant inputConstant = asConstantValue(getInput()).getConstant();
                if (masm.target.inlineObjects) {
                    // Emit recognizable placeholder bits; the actual reference is recorded via
                    // recordInlineDataInCode and patched in later.
                    crb.recordInlineDataInCode(inputConstant);
                    if (referenceSize == 4) {
                        masm.movl(resultReg, 0xDEADDEAD, true);
                    } else {
                        masm.movq(resultReg, 0xDEADDEADDEADDEADL, true);
                    }
                } else {
                    // Otherwise load the reference from its data-section entry.
                    AMD64Address address = (AMD64Address) crb.recordDataReferenceInCode(inputConstant, referenceSize);
                    if (referenceSize == 4) {
                        masm.movl(resultReg, address);
                    } else {
                        masm.movq(resultReg, address);
                    }
                }
                if (!constant.isCompressed()) {
                    // Caller asked for the uncompressed reference: expand in place, without
                    // clobbering the flags register.
                    Register baseReg = getBaseRegister(crb);
                    boolean preserveFlagsRegister = true;
                    emitUncompressWithBaseRegister(masm, resultReg, baseReg, getShift(), preserveFlagsRegister);
                }
            }
        }
    }
private FrameMapBuilder newFrameMapBuilder(RegisterConfig registerConfig) {
RegisterConfig registerConfigNonNull = registerConfig == null ? getCodeCache().getRegisterConfig() : registerConfig;
FrameMap frameMap = new AMD64FrameMap(getProviders().getCodeCache(), registerConfigNonNull, new SubstrateReferenceMapBuilderFactory(),
((SubstrateAMD64RegisterConfig) registerConfigNonNull).shouldUseBasePointer());
return new AMD64FrameMapBuilder(frameMap, getCodeCache(), registerConfigNonNull);
}
@Override
public LIRGenerationResult newLIRGenerationResult(CompilationIdentifier compilationId, LIR lir, RegisterAllocationConfig registerAllocationConfig, StructuredGraph graph, Object stub) {
SharedMethod method = (SharedMethod) graph.method();
CallingConvention callingConvention = CodeUtil.getCallingConvention(getCodeCache(), method.isEntryPoint() ? SubstrateCallingConventionType.NativeCallee
: SubstrateCallingConventionType.JavaCallee, method, this);
return new SubstrateLIRGenerationResult(compilationId, lir, newFrameMapBuilder(registerAllocationConfig.getRegisterConfig()), callingConvention, registerAllocationConfig, method);
}
/**
 * Factory hook for the AMD64 arithmetic LIR generator; subclasses may override to supply a
 * specialized implementation.
 *
 * @param nullRegisterValue the register holding the compressed-null base, or {@code null}
 */
protected AMD64ArithmeticLIRGenerator createArithmeticLIRGen(RegisterValue nullRegisterValue) {
    AMD64ArithmeticLIRGenerator arithmeticGen = new AMD64ArithmeticLIRGenerator(nullRegisterValue);
    return arithmeticGen;
}
/** Creates the move factory for the method held by the given LIR generation result. */
protected AMD64MoveFactoryBase createMoveFactory(LIRGenerationResult lirGenRes, BackupSlotProvider backupSlotProvider) {
    SubstrateLIRGenerationResult substrateResult = (SubstrateLIRGenerationResult) lirGenRes;
    return new SubstrateAMD64MoveFactory(backupSlotProvider, substrateResult.getMethod(), createLirKindTool());
}
/**
 * LIR kind tool for Substrate on AMD64: compressed object references still occupy a full
 * 64-bit register, and narrow (compressed) raw pointers are never used.
 */
protected static class SubstrateAMD64LIRKindTool extends AMD64LIRKindTool {
    @Override
    public LIRKind getNarrowOopKind() {
        // A compressed reference is kept in a QWORD register.
        return LIRKind.compressedReference(AMD64Kind.QWORD);
    }

    @Override
    public LIRKind getNarrowPointerKind() {
        // Substrate has no compressed raw pointers.
        throw shouldNotReachHere();
    }
}
/**
 * Factory hook for the {@link LIRKindTool} used during LIR generation; subclasses may
 * override to supply a different implementation.
 */
protected LIRKindTool createLirKindTool() {
    return new SubstrateAMD64LIRKindTool();
}
/**
 * Creates the Substrate LIR generator. When linear pointer compression is in use, the
 * reserved heap-base register is handed to the arithmetic generator as the null register;
 * otherwise no null register is configured.
 */
@Override
public LIRGeneratorTool newLIRGenerator(LIRGenerationResult lirGenRes) {
    RegisterValue nullRegisterValue = null;
    if (useLinearPointerCompression()) {
        nullRegisterValue = ReservedRegisters.singleton().getHeapBaseRegister().asValue();
    }
    AMD64ArithmeticLIRGenerator arithmeticGen = createArithmeticLIRGen(nullRegisterValue);
    BackupSlotProvider backupSlots = new BackupSlotProvider(lirGenRes.getFrameMapBuilder());
    AMD64MoveFactoryBase moves = createMoveFactory(lirGenRes, backupSlots);
    return new SubstrateAMD64LIRGenerator(createLirKindTool(), arithmeticGen, moves, getProviders(), lirGenRes);
}
/** Factory hook for the AMD64 node match rules; subclasses may override. */
protected AMD64NodeMatchRules createMatchRules(LIRGeneratorTool lirGen) {
    AMD64NodeMatchRules matchRules = new AMD64NodeMatchRules(lirGen);
    return matchRules;
}
/** Creates the node-to-LIR builder, wired up with this backend's match rules. */
@Override
public NodeLIRBuilderTool newNodeLIRBuilder(StructuredGraph graph, LIRGeneratorTool lirGen) {
    return new SubstrateAMD64NodeLIRBuilder(graph, lirGen, createMatchRules(lirGen));
}
/**
 * Whether linear pointer compression is active: true exactly when isolates are enabled
 * ({@code SubstrateOptions.SpawnIsolates}), in which case the reserved heap-base register is
 * used when encoding/decoding references (see {@code newLIRGenerator}).
 */
protected static boolean useLinearPointerCompression() {
    return SubstrateOptions.SpawnIsolates.getValue();
}
/**
 * Creates the compilation-result builder: sets up the macro assembler with code-patch
 * shifting and patch-annotation consumers, selects the appropriate frame context for the
 * method, and seeds the builder with the final frame size.
 *
 * The frame-context selection is factored out into {@link #createFrameContext} so each
 * concern reads independently; behavior is unchanged.
 */
@Override
public CompilationResultBuilder newCompilationResultBuilder(LIRGenerationResult lirGenResult, FrameMap frameMap, CompilationResult compilationResult, CompilationResultBuilderFactory factory) {
    LIR lir = lirGenResult.getLIR();
    OptionValues options = lir.getOptions();
    AMD64MacroAssembler masm = new AMD64MacroAssembler(getTarget(), options);
    // Keep recorded patch sites in sync when the assembler shifts already-emitted code.
    masm.setCodePatchShifter(compilationResult::shiftCodePatch);
    // Patches are consumed differently at image build time (HOSTED) than at run time.
    PatchConsumerFactory patchConsumerFactory = SubstrateUtil.HOSTED
                    ? PatchConsumerFactory.HostedPatchConsumerFactory.factory()
                    : PatchConsumerFactory.NativePatchConsumerFactory.factory();
    masm.setCodePatchingAnnotationConsumer(patchConsumerFactory.newConsumer(compilationResult));
    SharedMethod method = ((SubstrateLIRGenerationResult) lirGenResult).getMethod();
    DataBuilder dataBuilder = new SubstrateDataBuilder();
    FrameContext frameContext = createFrameContext(method);
    DebugContext debug = lir.getDebug();
    Register uncompressedNullRegister = useLinearPointerCompression() ? ReservedRegisters.singleton().getHeapBaseRegister() : Register.None;
    CompilationResultBuilder tasm = factory.createBuilder(getProviders(), lirGenResult.getFrameMap(), masm, dataBuilder, frameContext, options, debug, compilationResult,
                    uncompressedNullRegister);
    tasm.setTotalFrameSize(lirGenResult.getFrameMap().totalFrameSize());
    return tasm;
}

/**
 * Selects the frame context for {@code method}: deoptimization entry/exit stubs get their
 * dedicated contexts, methods with callee-saved registers get the stub calling-convention
 * context, and everything else uses the default Substrate frame context.
 */
private FrameContext createFrameContext(SharedMethod method) {
    Deoptimizer.StubType stubType = method.getDeoptStubType();
    if (stubType == Deoptimizer.StubType.EntryStub) {
        return new DeoptEntryStubContext();
    }
    if (stubType == Deoptimizer.StubType.ExitStub) {
        return new DeoptExitStubContext();
    }
    if (method.hasCalleeSavedRegisters()) {
        // Deopt targets must not save callee registers: the deoptimizer cannot restore them.
        VMError.guarantee(!method.isDeoptTarget(), "Deoptimization runtime cannot fill the callee saved registers");
        return new AMD64StubCallingConventionSubstrateFrameContext(method.getSignature().getReturnKind());
    }
    return new SubstrateAMD64FrameContext();
}
/** Creates the address-lowering phase, configured with the image's compress encoding. */
@Override
public Phase newAddressLoweringPhase(CodeCacheProvider codeCache) {
    SubstrateAMD64AddressLowering lowering = new SubstrateAMD64AddressLowering(ImageSingletons.lookup(CompressEncoding.class));
    return new AddressLoweringPhase(lowering);
}
/**
 * Wraps a finished compilation result for installation. Only the compilation result itself is
 * retained; the method, request, isDefault and options parameters are unused here.
 */
@Override
public CompiledCode createCompiledCode(ResolvedJavaMethod method, CompilationRequest compilationRequest, CompilationResult compilationResult, boolean isDefault, OptionValues options) {
    return new SubstrateCompiledCode(compilationResult);
}
/** Emits the final machine code for the fully processed LIR by delegating to the builder. */
@Override
public void emitCode(CompilationResultBuilder crb, LIR lir, ResolvedJavaMethod installedCodeOwner) {
    // installedCodeOwner is not needed: all required state lives in the builder and the LIR.
    crb.emit(lir);
}
/**
 * Hand-assembles a JNI call trampoline that jumps to the target method's entry point.
 *
 * With isolates enabled, the isolate base is loaded from {@code threadArg} (at
 * {@code threadIsolateOffset}) into rax, the method-id argument is added — presumably the
 * method id is isolate-relative in that configuration — and the jump goes through the entry
 * point at {@code methodObjEntryPointOffset}. Without isolates, the jump goes directly
 * through {@code methodIdArg}. Both prologue marks are recorded at the same (end) position,
 * as the trampoline has no real prologue.
 */
@Override
public CompilationResult createJNITrampolineMethod(ResolvedJavaMethod method, CompilationIdentifier identifier,
                RegisterValue threadArg, int threadIsolateOffset, RegisterValue methodIdArg, int methodObjEntryPointOffset) {
    CompilationResult trampoline = new CompilationResult(identifier);
    AMD64Assembler assembler = new AMD64Assembler(getTarget());
    Register methodIdRegister = methodIdArg.getRegister();
    if (SubstrateOptions.SpawnIsolates.getValue()) {
        assembler.movq(rax, new AMD64Address(threadArg.getRegister(), threadIsolateOffset));
        assembler.addq(rax, methodIdRegister);
        assembler.jmp(new AMD64Address(rax, methodObjEntryPointOffset));
    } else {
        assembler.jmp(new AMD64Address(methodIdRegister, methodObjEntryPointOffset));
    }
    trampoline.recordMark(assembler.position(), PROLOGUE_DECD_RSP);
    trampoline.recordMark(assembler.position(), PROLOGUE_END);
    byte[] code = assembler.close(true);
    trampoline.setTargetCode(code, code.length);
    trampoline.setTotalFrameSize(getTarget().wordSize);
    return trampoline;
}
}