Skip to content

Commit

Permalink
[GR-50866] Remove single threaded mode (-H:-MultiThreaded).
Browse files Browse the repository at this point in the history
PullRequest: graal/16402
  • Loading branch information
christianhaeubl committed Jan 29, 2024
2 parents f2a7774 + 1ef3fc5 commit e75d2d7
Show file tree
Hide file tree
Showing 57 changed files with 329 additions and 605 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@
import com.oracle.svm.core.thread.PlatformThreads;
import com.oracle.svm.core.thread.VMOperation;
import com.oracle.svm.core.thread.VMThreads;
import com.oracle.svm.core.threadlocal.VMThreadLocalMTSupport;
import com.oracle.svm.core.threadlocal.VMThreadLocalSupport;
import com.oracle.svm.core.util.TimeUtils;
import com.oracle.svm.core.util.VMError;

Expand Down Expand Up @@ -809,24 +809,22 @@ private void blackenStackRoots() {
JavaStackWalker.initWalk(walk, sp, ip);
walkStack(walk);

if (SubstrateOptions.MultiThreaded.getValue()) {
/*
* Scan the stacks of all the threads. Other threads will be blocked at a safepoint
* (or in native code) so they will each have a JavaFrameAnchor in their VMThread.
*/
for (IsolateThread vmThread = VMThreads.firstThread(); vmThread.isNonNull(); vmThread = VMThreads.nextThread(vmThread)) {
if (vmThread == CurrentIsolate.getCurrentThread()) {
/*
* The current thread is already scanned by code above, so we do not have to
* do anything for it here. It might have a JavaFrameAnchor from earlier
* Java-to-C transitions, but certainly not at the top of the stack since it
* is running this code, so just this scan would be incomplete.
*/
continue;
}
if (JavaStackWalker.initWalk(walk, vmThread)) {
walkStack(walk);
}
/*
* Scan the stacks of all the threads. Other threads will be blocked at a safepoint (or
* in native code) so they will each have a JavaFrameAnchor in their VMThread.
*/
for (IsolateThread vmThread = VMThreads.firstThread(); vmThread.isNonNull(); vmThread = VMThreads.nextThread(vmThread)) {
if (vmThread == CurrentIsolate.getCurrentThread()) {
/*
* The current thread is already scanned by code above, so we do not have to do
* anything for it here. It might have a JavaFrameAnchor from earlier Java-to-C
* transitions, but certainly not at the top of the stack since it is running
* this code, so just this scan would be incomplete.
*/
continue;
}
if (JavaStackWalker.initWalk(walk, vmThread)) {
walkStack(walk);
}
}
} finally {
Expand Down Expand Up @@ -891,15 +889,13 @@ private void walkStack(JavaStackWalk walk) {

@Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true)
private void walkThreadLocals() {
if (SubstrateOptions.MultiThreaded.getValue()) {
Timer walkThreadLocalsTimer = timers.walkThreadLocals.open();
try {
for (IsolateThread isolateThread = VMThreads.firstThread(); isolateThread.isNonNull(); isolateThread = VMThreads.nextThread(isolateThread)) {
VMThreadLocalMTSupport.singleton().walk(isolateThread, greyToBlackObjRefVisitor);
}
} finally {
walkThreadLocalsTimer.close();
Timer walkThreadLocalsTimer = timers.walkThreadLocals.open();
try {
for (IsolateThread isolateThread = VMThreads.firstThread(); isolateThread.isNonNull(); isolateThread = VMThreads.nextThread(isolateThread)) {
VMThreadLocalSupport.singleton().walk(isolateThread, greyToBlackObjRefVisitor);
}
} finally {
walkThreadLocalsTimer.close();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
import org.graalvm.word.WordFactory;

import com.oracle.svm.core.FrameAccess;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.Uninterruptible;
import com.oracle.svm.core.genscavenge.AlignedHeapChunk.AlignedHeader;
import com.oracle.svm.core.genscavenge.HeapChunk.Header;
Expand Down Expand Up @@ -175,9 +174,7 @@ private static void cleanAlignedChunk(AlignedHeader alignedChunk) {
*/
private void pushUnusedAlignedChunk(AlignedHeader chunk) {
assert VMOperation.isGCInProgress();
if (SubstrateOptions.MultiThreaded.getValue()) {
VMThreads.guaranteeOwnsThreadMutex("Should hold the lock when pushing to the global list.");
}
VMThreads.guaranteeOwnsThreadMutex("Should hold the lock when pushing to the global list.");

HeapChunk.setNext(chunk, unusedAlignedChunks.get());
unusedAlignedChunks.set(chunk);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,25 +27,23 @@

import java.util.ArrayList;

import com.oracle.svm.core.heap.VMOperationInfos;
import jdk.graal.compiler.word.Word;
import org.graalvm.nativeimage.CurrentIsolate;
import org.graalvm.nativeimage.IsolateThread;
import org.graalvm.nativeimage.c.function.CodePointer;
import org.graalvm.word.Pointer;
import org.graalvm.word.UnsignedWord;
import org.graalvm.word.WordFactory;

import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.NeverInline;
import com.oracle.svm.core.heap.RestrictHeapAccess;
import com.oracle.svm.core.code.CodeInfo;
import com.oracle.svm.core.code.CodeInfoTable;
import com.oracle.svm.core.deopt.DeoptimizedFrame;
import com.oracle.svm.core.heap.Heap;
import com.oracle.svm.core.heap.ObjectReferenceVisitor;
import com.oracle.svm.core.heap.ObjectVisitor;
import com.oracle.svm.core.heap.ReferenceAccess;
import com.oracle.svm.core.heap.RestrictHeapAccess;
import com.oracle.svm.core.heap.VMOperationInfos;
import com.oracle.svm.core.hub.InteriorObjRefWalker;
import com.oracle.svm.core.log.Log;
import com.oracle.svm.core.log.StringBuilderLog;
Expand All @@ -56,6 +54,8 @@
import com.oracle.svm.core.thread.VMOperation;
import com.oracle.svm.core.thread.VMThreads;

import jdk.graal.compiler.word.Word;

/** Determines paths from roots to objects or heap regions. */
public final class PathExhibitor {
private static final FrameSlotVisitor frameSlotVisitor = new FrameSlotVisitor();
Expand Down Expand Up @@ -155,16 +155,14 @@ private static void findPathInStack(TargetMatcher target, PathEdge edge, Pointer
JavaStackWalker.walkCurrentThread(currentThreadWalkStackPointer, stackFrameVisitor);
stackFrameVisitor.reset();

if (SubstrateOptions.MultiThreaded.getValue()) {
IsolateThread thread = VMThreads.firstThread();
while (!edge.isFilled() && thread.isNonNull()) {
if (thread.notEqual(CurrentIsolate.getCurrentThread())) { // walked above
stackFrameVisitor.initialize(target, edge);
JavaStackWalker.walkThread(thread, stackFrameVisitor);
stackFrameVisitor.reset();
}
thread = VMThreads.nextThread(thread);
IsolateThread thread = VMThreads.firstThread();
while (!edge.isFilled() && thread.isNonNull()) {
if (thread.notEqual(CurrentIsolate.getCurrentThread())) { // walked above
stackFrameVisitor.initialize(target, edge);
JavaStackWalker.walkThread(thread, stackFrameVisitor);
stackFrameVisitor.reset();
}
thread = VMThreads.nextThread(thread);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
import org.graalvm.word.WordFactory;

import com.oracle.svm.core.AlwaysInline;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.Uninterruptible;
import com.oracle.svm.core.UnmanagedMemoryUtil;
import com.oracle.svm.core.config.ConfigurationValues;
Expand Down Expand Up @@ -232,9 +231,7 @@ void appendAlignedHeapChunk(AlignedHeapChunk.AlignedHeader aChunk) {
* This method is used from {@link PosixJavaThreads#detachThread(VMThread)}, so it can not
* guarantee that it is inside a VMOperation, only that there is some mutual exclusion.
*/
if (SubstrateOptions.MultiThreaded.getValue()) {
VMThreads.guaranteeOwnsThreadMutex("Trying to append an aligned heap chunk but no mutual exclusion.", true);
}
VMThreads.guaranteeOwnsThreadMutex("Trying to append an aligned heap chunk but no mutual exclusion.", true);
appendAlignedHeapChunkUninterruptibly(aChunk);
accounting.noteAlignedHeapChunk();
}
Expand Down Expand Up @@ -286,9 +283,7 @@ void appendUnalignedHeapChunk(UnalignedHeapChunk.UnalignedHeader uChunk) {
* This method is used from {@link PosixJavaThreads#detachThread(VMThread)}, so it can not
* guarantee that it is inside a VMOperation, only that there is some mutual exclusion.
*/
if (SubstrateOptions.MultiThreaded.getValue()) {
VMThreads.guaranteeOwnsThreadMutex("Trying to append an unaligned chunk but no mutual exclusion.", true);
}
VMThreads.guaranteeOwnsThreadMutex("Trying to append an unaligned chunk but no mutual exclusion.", true);
appendUnalignedHeapChunkUninterruptibly(uChunk);
accounting.noteUnalignedHeapChunk(uChunk);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
import org.graalvm.word.Pointer;

import com.oracle.svm.core.NeverInline;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.code.CodeInfo;
import com.oracle.svm.core.code.CodeInfoTable;
import com.oracle.svm.core.deopt.DeoptimizedFrame;
Expand All @@ -59,16 +58,14 @@ public static boolean verifyAllThreads() {
JavaStackWalker.walkCurrentThread(KnownIntrinsics.readCallerStackPointer(), STACK_FRAME_VISITOR);
result &= STACK_FRAME_VISITOR.result;

if (SubstrateOptions.MultiThreaded.getValue()) {
for (IsolateThread thread = VMThreads.firstThread(); thread.isNonNull(); thread = VMThreads.nextThread(thread)) {
if (thread == CurrentIsolate.getCurrentThread()) {
continue;
}

STACK_FRAME_VISITOR.initialize();
JavaStackWalker.walkThread(thread, STACK_FRAME_VISITOR);
result &= STACK_FRAME_VISITOR.result;
for (IsolateThread thread = VMThreads.firstThread(); thread.isNonNull(); thread = VMThreads.nextThread(thread)) {
if (thread == CurrentIsolate.getCurrentThread()) {
continue;
}

STACK_FRAME_VISITOR.initialize();
JavaStackWalker.walkThread(thread, STACK_FRAME_VISITOR);
result &= STACK_FRAME_VISITOR.result;
}
return result;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,6 @@
import static com.oracle.svm.core.graal.snippets.SubstrateAllocationSnippets.TLAB_END_IDENTITY;
import static com.oracle.svm.core.graal.snippets.SubstrateAllocationSnippets.TLAB_TOP_IDENTITY;

import jdk.graal.compiler.api.replacements.Fold;
import jdk.graal.compiler.replacements.AllocationSnippets.FillContent;
import jdk.graal.compiler.word.Word;
import org.graalvm.nativeimage.CurrentIsolate;
import org.graalvm.nativeimage.IsolateThread;
import org.graalvm.nativeimage.Platform;
Expand All @@ -46,7 +43,6 @@
import org.graalvm.word.WordFactory;

import com.oracle.svm.core.SubstrateGCOptions;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.Uninterruptible;
import com.oracle.svm.core.genscavenge.AlignedHeapChunk.AlignedHeader;
import com.oracle.svm.core.genscavenge.UnalignedHeapChunk.UnalignedHeader;
Expand Down Expand Up @@ -76,6 +72,10 @@
import com.oracle.svm.core.threadlocal.FastThreadLocalWord;
import com.oracle.svm.core.util.VMError;

import jdk.graal.compiler.api.replacements.Fold;
import jdk.graal.compiler.replacements.AllocationSnippets.FillContent;
import jdk.graal.compiler.word.Word;

/**
* Bump-pointer allocation from thread-local top and end Pointers. Many of these methods are called
* from allocation snippets, so they can not do anything fancy. q It happens that prefetch
Expand Down Expand Up @@ -428,12 +428,8 @@ private static void guaranteeZeroed(Pointer memory, UnsignedWord size) {
static void disableAndFlushForAllThreads() {
VMOperation.guaranteeInProgress("ThreadLocalAllocation.disableAndFlushForAllThreads");

if (SubstrateOptions.MultiThreaded.getValue()) {
for (IsolateThread vmThread = VMThreads.firstThread(); vmThread.isNonNull(); vmThread = VMThreads.nextThread(vmThread)) {
disableAndFlushForThread(vmThread);
}
} else {
disableAndFlushForThread(WordFactory.nullPointer());
for (IsolateThread vmThread = VMThreads.firstThread(); vmThread.isNonNull(); vmThread = VMThreads.nextThread(vmThread)) {
disableAndFlushForThread(vmThread);
}
}

Expand All @@ -444,12 +440,9 @@ static void disableAndFlushForThread(IsolateThread vmThread) {

@Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true)
static void tearDown() {
IsolateThread thread = WordFactory.nullPointer();
if (SubstrateOptions.MultiThreaded.getValue()) {
// no other thread is alive, so it is always safe to access the first thread
thread = VMThreads.firstThreadUnsafe();
VMError.guarantee(VMThreads.nextThread(thread).isNull(), "Other isolate threads are still active");
}
// no other thread is alive, so it is always safe to access the first thread
IsolateThread thread = VMThreads.firstThreadUnsafe();
VMError.guarantee(VMThreads.nextThread(thread).isNull(), "Other isolate threads are still active");
freeHeapChunks(getTlab(thread));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@

public final class AArch64ReservedRegisters extends ReservedRegisters {

    /** r28 is the dedicated thread register (renamed from THREAD_REGISTER_CANDIDATE). */
    public static final Register THREAD_REGISTER = AArch64.r28;
    /** r27 may be reserved as the heap base register. */
    public static final Register HEAP_BASE_REGISTER_CANDIDATE = AArch64.r27;

    @Platforms(Platform.HOSTED_ONLY.class)
    AArch64ReservedRegisters() {
        super(AArch64.sp, THREAD_REGISTER, HEAP_BASE_REGISTER_CANDIDATE);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,20 +24,18 @@
*/
package com.oracle.svm.core.graal.aarch64;

import com.oracle.svm.core.ReservedRegisters;
import com.oracle.svm.core.nodes.SafepointCheckNode;
import com.oracle.svm.core.thread.Safepoint;
import com.oracle.svm.core.thread.ThreadingSupportImpl;

import jdk.graal.compiler.asm.aarch64.AArch64Address;
import jdk.graal.compiler.asm.aarch64.AArch64Assembler;
import jdk.graal.compiler.asm.aarch64.AArch64MacroAssembler;
import jdk.graal.compiler.asm.aarch64.AArch64MacroAssembler.ScratchRegister;
import jdk.graal.compiler.lir.LIRInstructionClass;
import jdk.graal.compiler.lir.aarch64.AArch64LIRInstruction;
import jdk.graal.compiler.lir.asm.CompilationResultBuilder;

import com.oracle.svm.core.ReservedRegisters;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.nodes.SafepointCheckNode;
import com.oracle.svm.core.thread.Safepoint;
import com.oracle.svm.core.thread.ThreadingSupportImpl;

import jdk.vm.ci.code.Register;

/**
Expand All @@ -53,7 +51,6 @@ public AArch64SafepointCheckOp() {

@Override
public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
assert SubstrateOptions.MultiThreaded.getValue();
int safepointSize = 32; // safepoint is an integer
AArch64Address safepointAddress = AArch64Address.createImmediateAddress(safepointSize, AArch64Address.AddressingMode.IMMEDIATE_UNSIGNED_SCALED,
ReservedRegisters.singleton().getThreadRegister(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -425,19 +425,17 @@ static void maybeTransitionToNative(CompilationResultBuilder crb, AArch64MacroAs
AArch64Address.createImmediateAddress(64, AArch64Address.AddressingMode.IMMEDIATE_UNSIGNED_SCALED, anchor, knownOffsets.getJavaFrameAnchorLastSPOffset()));
}

if (SubstrateOptions.MultiThreaded.getValue()) {
/*
* Change VMThread status from Java to Native. Note a "store release" is needed for this
* update to ensure VMThread status is only updated once all prior stores are also
* observable.
*/
try (ScratchRegister scratch1 = masm.getScratchRegister(); ScratchRegister scratch2 = masm.getScratchRegister()) {
Register statusValueRegister = scratch1.getRegister();
Register statusAddressRegister = scratch2.getRegister();
masm.mov(statusValueRegister, newThreadStatus);
masm.loadAlignedAddress(32, statusAddressRegister, ReservedRegisters.singleton().getThreadRegister(), knownOffsets.getVMThreadStatusOffset());
masm.stlr(32, statusValueRegister, statusAddressRegister);
}
/*
* Change VMThread status from Java to Native. Note a "store release" is needed for this
* update to ensure VMThread status is only updated once all prior stores are also
* observable.
*/
try (ScratchRegister scratch1 = masm.getScratchRegister(); ScratchRegister scratch2 = masm.getScratchRegister()) {
Register statusValueRegister = scratch1.getRegister();
Register statusAddressRegister = scratch2.getRegister();
masm.mov(statusValueRegister, newThreadStatus);
masm.loadAlignedAddress(32, statusAddressRegister, ReservedRegisters.singleton().getThreadRegister(), knownOffsets.getVMThreadStatusOffset());
masm.stlr(32, statusValueRegister, statusAddressRegister);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,11 @@

public final class AMD64ReservedRegisters extends ReservedRegisters {

    /** r15 is the dedicated thread register (renamed from THREAD_REGISTER_CANDIDATE). */
    public static final Register THREAD_REGISTER = r15;
    /** r14 may be reserved as the heap base register. */
    public static final Register HEAP_BASE_REGISTER_CANDIDATE = r14;

    @Platforms(Platform.HOSTED_ONLY.class)
    AMD64ReservedRegisters() {
        super(AMD64.rsp, THREAD_REGISTER, HEAP_BASE_REGISTER_CANDIDATE);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,11 @@
*/
package com.oracle.svm.core.graal.amd64;

import com.oracle.svm.core.ReservedRegisters;
import com.oracle.svm.core.nodes.SafepointCheckNode;
import com.oracle.svm.core.thread.Safepoint;
import com.oracle.svm.core.thread.ThreadingSupportImpl;

import jdk.graal.compiler.asm.amd64.AMD64Address;
import jdk.graal.compiler.asm.amd64.AMD64Assembler;
import jdk.graal.compiler.asm.amd64.AMD64MacroAssembler;
Expand All @@ -32,12 +37,6 @@
import jdk.graal.compiler.lir.amd64.AMD64LIRInstruction;
import jdk.graal.compiler.lir.asm.CompilationResultBuilder;

import com.oracle.svm.core.ReservedRegisters;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.nodes.SafepointCheckNode;
import com.oracle.svm.core.thread.Safepoint;
import com.oracle.svm.core.thread.ThreadingSupportImpl;

/**
* Compact instruction for {@link SafepointCheckNode}.
*/
Expand All @@ -52,7 +51,6 @@ public AMD64SafepointCheckOp() {

@Override
public void emitCode(CompilationResultBuilder crb, AMD64MacroAssembler masm) {
assert SubstrateOptions.MultiThreaded.getValue();
int safepointRequestedOffset = Safepoint.getThreadLocalSafepointRequestedOffset();
AMD64Address safepointRequested = new AMD64Address(ReservedRegisters.singleton().getThreadRegister(), safepointRequestedOffset);
if (ThreadingSupportImpl.isRecurringCallbackSupported()) {
Expand Down
Loading

0 comments on commit e75d2d7

Please sign in to comment.