org.truffleruby.language.objects.shared.WriteBarrierNodeGen
Core module of Ruby on Truffle
// CheckStyle: start generated
package org.truffleruby.language.objects.shared;
import com.oracle.truffle.api.Assumption;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.CompilationFinal;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.dsl.DSLSupport;
import com.oracle.truffle.api.dsl.GeneratedBy;
import com.oracle.truffle.api.dsl.NeverDefault;
import com.oracle.truffle.api.dsl.UnsupportedSpecializationException;
import com.oracle.truffle.api.dsl.DSLSupport.SpecializationDataNode;
import com.oracle.truffle.api.dsl.InlineSupport.InlineTarget;
import com.oracle.truffle.api.dsl.InlineSupport.ReferenceField;
import com.oracle.truffle.api.dsl.InlineSupport.RequiredField;
import com.oracle.truffle.api.dsl.InlineSupport.StateField;
import com.oracle.truffle.api.dsl.InlineSupport.UnsafeAccessedField;
import com.oracle.truffle.api.nodes.DenyReplace;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.UnadoptableNode;
import com.oracle.truffle.api.object.Shape;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;
import java.util.Objects;
import org.truffleruby.core.FinalizerReference;
import org.truffleruby.language.RubyDynamicObject;
import org.truffleruby.language.RubyGuards;
import org.truffleruby.language.objects.ShapeCachingGuards;
/**
* Debug Info:
* Specialization {@link WriteBarrierNode#noWriteBarrier}
* Activation probability: 0.23929
* With/without class size: 6/0 bytes
* Specialization {@link WriteBarrierNode#alreadySharedCached}
* Activation probability: 0.20714
* With/without class size: 7/4 bytes
* Specialization {@link WriteBarrierNode#alreadySharedUncached}
* Activation probability: 0.17500
* With/without class size: 6/0 bytes
* Specialization {@link WriteBarrierNode#writeBarrierCached}
* Activation probability: 0.14286
* With/without class size: 7/8 bytes
* Specialization {@link WriteBarrierNode#updateShapeAndWriteBarrier}
* Activation probability: 0.11071
* With/without class size: 6/4 bytes
* Specialization {@link WriteBarrierNode#writeBarrierUncached}
* Activation probability: 0.07857
* With/without class size: 4/0 bytes
* Specialization {@link WriteBarrierNode#writeBarrierFinalizer}
* Activation probability: 0.04643
* With/without class size: 4/0 bytes
*
*/
@GeneratedBy(WriteBarrierNode.class)
@SuppressWarnings({"javadoc", "unused"})
public final class WriteBarrierNodeGen extends WriteBarrierNode {
static final ReferenceField<AlreadySharedCachedData> ALREADY_SHARED_CACHED_CACHE_UPDATER = ReferenceField.create(MethodHandles.lookup(), "alreadySharedCached_cache", AlreadySharedCachedData.class);
static final ReferenceField<WriteBarrierCachedData> WRITE_BARRIER_CACHED_CACHE_UPDATER = ReferenceField.create(MethodHandles.lookup(), "writeBarrierCached_cache", WriteBarrierCachedData.class);
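// Note: these ReferenceField updaters back the lock-free inline-cache updates in
// executeAndSpecialize below. A new cache entry is built off to the side and then
// published with compareAndSet; if another thread won the race, the loop re-reads
// and retries. A minimal sketch of the pattern (field and type names mirror the
// generated code; the standalone snippet itself is illustrative only):
//
//   AlreadySharedCachedData seen = ALREADY_SHARED_CACHED_CACHE_UPDATER.getVolatile(this);
//   AlreadySharedCachedData fresh = new AlreadySharedCachedData();
//   fresh.cachedShape_ = shape;
//   if (!ALREADY_SHARED_CACHED_CACHE_UPDATER.compareAndSet(this, seen, fresh)) {
//       /* lost the race: re-read the cache and retry */
//   }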
private static final Uncached UNCACHED = new Uncached();
/**
* State Info:
* 0: SpecializationActive {@link WriteBarrierNode#noWriteBarrier}
* 1: SpecializationActive {@link WriteBarrierNode#alreadySharedCached}
* 2: SpecializationActive {@link WriteBarrierNode#alreadySharedUncached}
* 3: SpecializationActive {@link WriteBarrierNode#writeBarrierCached}
* 4: SpecializationActive {@link WriteBarrierNode#writeBarrierUncached}
* 5: SpecializationActive {@link WriteBarrierNode#updateShapeAndWriteBarrier}
* 6: SpecializationActive {@link WriteBarrierNode#writeBarrierFinalizer}
*
*/
@CompilationFinal private int state_0_;
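// Note: state_0_ is a bit set over the seven specializations listed above. Bit i is
// set once specialization i has been activated, so the fast path can test membership
// with a single mask, e.g. (state_0_ & 0b1000) != 0 checks writeBarrierCached.
// Because the field is @CompilationFinal, partial evaluation folds these tests to
// constants until the next transferToInterpreterAndInvalidate.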
@UnsafeAccessedField @CompilationFinal private AlreadySharedCachedData alreadySharedCached_cache;
@UnsafeAccessedField @Child private WriteBarrierCachedData writeBarrierCached_cache;
/**
* Source Info:
* Specialization: {@link WriteBarrierNode#updateShapeAndWriteBarrier}
* Parameter: {@link WriteBarrierNode} writeBarrierNode
*/
@Child private WriteBarrierNode updateShapeAndWriteBarrier_writeBarrierNode_;
private WriteBarrierNodeGen() {
}
@Override
protected void executeInternal(Node arg0Value, Object arg1Value, int arg2Value) {
int state_0 = this.state_0_;
if (state_0 != 0 /* is SpecializationActive[WriteBarrierNode.noWriteBarrier(Node, Object, int)] || SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] || SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] || SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] || SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] || SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] || SpecializationActive[WriteBarrierNode.writeBarrierFinalizer(Node, FinalizerReference, int)] */) {
if ((state_0 & 0b1) != 0 /* is SpecializationActive[WriteBarrierNode.noWriteBarrier(Node, Object, int)] */) {
if ((!(RubyGuards.isRubyDynamicObject(arg1Value))) && (!(WriteBarrierNode.isFinalizer(arg1Value)))) {
WriteBarrierNode.noWriteBarrier(this, arg1Value, arg2Value);
return;
}
}
if ((state_0 & 0b111110) != 0 /* is SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] || SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] || SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] || SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] || SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */ && arg1Value instanceof RubyDynamicObject) {
RubyDynamicObject arg1Value_ = (RubyDynamicObject) arg1Value;
if ((state_0 & 0b10) != 0 /* is SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] */) {
AlreadySharedCachedData s1_ = this.alreadySharedCached_cache;
if (s1_ != null) {
if ((arg1Value_.getShape() == s1_.cachedShape_)) {
assert DSLSupport.assertIdempotence((s1_.cachedShape_.isShared()));
WriteBarrierNode.alreadySharedCached(arg1Value_, arg2Value, s1_.cachedShape_);
return;
}
}
}
if ((state_0 & 0b100) != 0 /* is SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] */) {
if ((arg1Value_.getShape().isShared())) {
WriteBarrierNode.alreadySharedUncached(arg1Value_, arg2Value);
return;
}
}
if ((state_0 & 0b1000) != 0 /* is SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] */) {
WriteBarrierCachedData s3_ = this.writeBarrierCached_cache;
if (s3_ != null) {
if (!Assumption.isValidAssumption(s3_.assumption0_)) {
CompilerDirectives.transferToInterpreterAndInvalidate();
removeWriteBarrierCached_(s3_);
executeAndSpecialize(arg0Value, arg1Value_, arg2Value);
return;
}
if ((arg2Value < WriteBarrierNode.MAX_DEPTH) && (arg1Value_.getShape() == s3_.cachedShape_)) {
assert DSLSupport.assertIdempotence((!(s3_.cachedShape_.isShared())));
WriteBarrierNode.writeBarrierCached(this, arg1Value_, arg2Value, s3_.cachedShape_, s3_.shareObjectNode_);
return;
}
}
}
if ((state_0 & 0b100000) != 0 /* is SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] */) {
{
WriteBarrierNode writeBarrierNode__ = this.updateShapeAndWriteBarrier_writeBarrierNode_;
if (writeBarrierNode__ != null) {
if ((ShapeCachingGuards.updateShape(arg1Value_))) {
WriteBarrierNode.updateShapeAndWriteBarrier(arg1Value_, arg2Value, writeBarrierNode__);
return;
}
}
}
}
if ((state_0 & 0b10000) != 0 /* is SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */) {
if ((!(arg1Value_.getShape().isShared()))) {
WriteBarrierNode.writeBarrierUncached(this, arg1Value_, arg2Value);
return;
}
}
}
if ((state_0 & 0b1000000) != 0 /* is SpecializationActive[WriteBarrierNode.writeBarrierFinalizer(Node, FinalizerReference, int)] */ && arg1Value instanceof FinalizerReference) {
FinalizerReference arg1Value_ = (FinalizerReference) arg1Value;
WriteBarrierNode.writeBarrierFinalizer(this, arg1Value_, arg2Value);
return;
}
}
CompilerDirectives.transferToInterpreterAndInvalidate();
executeAndSpecialize(arg0Value, arg1Value, arg2Value);
return;
}
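// Note: when no active specialization matches the arguments, the fast path above
// deoptimizes via transferToInterpreterAndInvalidate and falls back to
// executeAndSpecialize, which selects a specialization, records it in state_0_,
// and executes it once in the interpreter before the node is compiled again.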
@SuppressWarnings("unused")
private void executeAndSpecialize(Node arg0Value, Object arg1Value, int arg2Value) {
int state_0 = this.state_0_;
int oldState_0 = state_0;
try {
if ((!(RubyGuards.isRubyDynamicObject(arg1Value))) && (!(WriteBarrierNode.isFinalizer(arg1Value)))) {
state_0 = state_0 | 0b1 /* add SpecializationActive[WriteBarrierNode.noWriteBarrier(Node, Object, int)] */;
this.state_0_ = state_0;
WriteBarrierNode.noWriteBarrier(this, arg1Value, arg2Value);
return;
}
if (arg1Value instanceof RubyDynamicObject) {
RubyDynamicObject arg1Value_ = (RubyDynamicObject) arg1Value;
if (((state_0 & 0b100)) == 0 /* is-not SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] */) {
while (true) {
int count1_ = 0;
AlreadySharedCachedData s1_ = ALREADY_SHARED_CACHED_CACHE_UPDATER.getVolatile(this);
AlreadySharedCachedData s1_original = s1_;
while (s1_ != null) {
if ((arg1Value_.getShape() == s1_.cachedShape_)) {
assert DSLSupport.assertIdempotence((s1_.cachedShape_.isShared()));
break;
}
count1_++;
s1_ = null;
break;
}
if (s1_ == null && count1_ < 1) {
{
Shape cachedShape__ = (arg1Value_.getShape());
if ((arg1Value_.getShape() == cachedShape__) && (cachedShape__.isShared())) {
s1_ = new AlreadySharedCachedData();
Objects.requireNonNull(cachedShape__, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
s1_.cachedShape_ = cachedShape__;
if (!ALREADY_SHARED_CACHED_CACHE_UPDATER.compareAndSet(this, s1_original, s1_)) {
continue;
}
state_0 = state_0 | 0b10 /* add SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] */;
this.state_0_ = state_0;
}
}
}
if (s1_ != null) {
WriteBarrierNode.alreadySharedCached(arg1Value_, arg2Value, s1_.cachedShape_);
return;
}
break;
}
}
if ((arg1Value_.getShape().isShared())) {
this.alreadySharedCached_cache = null;
state_0 = state_0 & 0xfffffffd /* remove SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] */;
state_0 = state_0 | 0b100 /* add SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] */;
this.state_0_ = state_0;
WriteBarrierNode.alreadySharedUncached(arg1Value_, arg2Value);
return;
}
if (((state_0 & 0b10000)) == 0 /* is-not SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */) {
while (true) {
int count3_ = 0;
WriteBarrierCachedData s3_ = WRITE_BARRIER_CACHED_CACHE_UPDATER.getVolatile(this);
WriteBarrierCachedData s3_original = s3_;
while (s3_ != null) {
if ((arg2Value < WriteBarrierNode.MAX_DEPTH) && (arg1Value_.getShape() == s3_.cachedShape_)) {
assert DSLSupport.assertIdempotence((!(s3_.cachedShape_.isShared())));
if (Assumption.isValidAssumption(s3_.assumption0_)) {
break;
}
}
count3_++;
s3_ = null;
break;
}
if (s3_ == null && count3_ < 1) {
if ((arg2Value < WriteBarrierNode.MAX_DEPTH)) {
Shape cachedShape__1 = (arg1Value_.getShape());
if ((arg1Value_.getShape() == cachedShape__1) && (!(cachedShape__1.isShared()))) {
Assumption assumption0 = (cachedShape__1.getValidAssumption());
if (Assumption.isValidAssumption(assumption0)) {
s3_ = this.insert(new WriteBarrierCachedData());
Objects.requireNonNull(cachedShape__1, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
s3_.cachedShape_ = cachedShape__1;
ShareObjectNode shareObjectNode__ = s3_.insert((ShareObjectNodeGen.create()));
Objects.requireNonNull(shareObjectNode__, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
s3_.shareObjectNode_ = shareObjectNode__;
s3_.assumption0_ = assumption0;
if (!WRITE_BARRIER_CACHED_CACHE_UPDATER.compareAndSet(this, s3_original, s3_)) {
continue;
}
state_0 = state_0 | 0b1000 /* add SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] */;
this.state_0_ = state_0;
}
}
}
}
if (s3_ != null) {
WriteBarrierNode.writeBarrierCached(this, arg1Value_, arg2Value, s3_.cachedShape_, s3_.shareObjectNode_);
return;
}
break;
}
}
if (((state_0 & 0b10000)) == 0 /* is-not SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */) {
if ((ShapeCachingGuards.updateShape(arg1Value_))) {
WriteBarrierNode writeBarrierNode__ = this.insert((WriteBarrierNodeGen.create()));
Objects.requireNonNull(writeBarrierNode__, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
VarHandle.storeStoreFence();
this.updateShapeAndWriteBarrier_writeBarrierNode_ = writeBarrierNode__;
state_0 = state_0 | 0b100000 /* add SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] */;
this.state_0_ = state_0;
WriteBarrierNode.updateShapeAndWriteBarrier(arg1Value_, arg2Value, writeBarrierNode__);
return;
}
}
if ((!(arg1Value_.getShape().isShared()))) {
this.writeBarrierCached_cache = null;
this.updateShapeAndWriteBarrier_writeBarrierNode_ = null;
state_0 = state_0 & 0xffffffd7 /* remove SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)], SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] */;
state_0 = state_0 | 0b10000 /* add SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */;
this.state_0_ = state_0;
WriteBarrierNode.writeBarrierUncached(this, arg1Value_, arg2Value);
return;
}
}
if (arg1Value instanceof FinalizerReference) {
FinalizerReference arg1Value_ = (FinalizerReference) arg1Value;
state_0 = state_0 | 0b1000000 /* add SpecializationActive[WriteBarrierNode.writeBarrierFinalizer(Node, FinalizerReference, int)] */;
this.state_0_ = state_0;
WriteBarrierNode.writeBarrierFinalizer(this, arg1Value_, arg2Value);
return;
}
throw new UnsupportedSpecializationException(this, null, arg0Value, arg1Value, arg2Value);
} finally {
if (oldState_0 != 0) {
checkForPolymorphicSpecialize(oldState_0);
}
}
}
private void checkForPolymorphicSpecialize(int oldState_0) {
int state_0 = this.state_0_;
int newState_0 = state_0;
if (((oldState_0 ^ newState_0) != 0)) {
this.reportPolymorphicSpecialize();
}
}
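// Note: the XOR above detects any change to the specialization bit set made during
// executeAndSpecialize. If bits changed while the node was already specialized
// (oldState_0 != 0), reportPolymorphicSpecialize() gives the runtime a chance to
// split the call site and regain monomorphic caches.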
void removeWriteBarrierCached_(WriteBarrierCachedData s3_) {
this.state_0_ = state_0_ & 0xfffffff7 /* remove SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] */;
this.writeBarrierCached_cache = null;
}
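// Note: removeWriteBarrierCached_ is the cleanup path taken when the cached shape's
// Assumption is invalidated (see the isValidAssumption check in executeInternal):
// the specialization bit is cleared and the cache entry dropped so the node can
// respecialize against the object's current shape.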
@TruffleBoundary
private static UnsupportedSpecializationException newUnsupportedSpecializationException3LLI(Node thisNode_, Object arg0Value, Object arg1Value, int arg2Value) {
return new UnsupportedSpecializationException(thisNode_, null, arg0Value, arg1Value, arg2Value);
}
@NeverDefault
public static WriteBarrierNode create() {
return new WriteBarrierNodeGen();
}
@NeverDefault
public static WriteBarrierNode getUncached() {
return WriteBarrierNodeGen.UNCACHED;
}
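// A minimal usage sketch. Hedged: the public execute entry point is declared on the
// abstract WriteBarrierNode base class, which is not shown here, so its exact name
// and signature are assumed for illustration:
//
//   @Child WriteBarrierNode writeBarrier = WriteBarrierNodeGen.create();
//   ...
//   writeBarrier.execute(this, value, 0);  // hypothetical entry point, depth 0
//
// Slow paths that must not allocate AST nodes can use the shared, stateless
// WriteBarrierNodeGen.getUncached() instance instead.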
/**
 * Required Fields:
 * - {@link Inlined#state_0_}
 * - {@link Inlined#alreadySharedCached_cache}
 * - {@link Inlined#writeBarrierCached_cache}
 * - {@link Inlined#updateShapeAndWriteBarrier_writeBarrierNode_}
 */
@NeverDefault
public static WriteBarrierNode inline(@RequiredField(bits = 7, value = StateField.class) @RequiredField(type = Object.class, value = ReferenceField.class) @RequiredField(type = Node.class, value = ReferenceField.class) @RequiredField(type = Node.class, value = ReferenceField.class) InlineTarget target) {
return new WriteBarrierNodeGen.Inlined(target);
}
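// Note: inline(target) returns a variant whose specialization state and caches live
// in fields of the enclosing node rather than in a separate WriteBarrierNodeGen
// instance. The @RequiredField annotations declare what the caller must host: the
// 7 state bits plus the three reference fields. The DSL supplies the matching
// InlineTarget when the node is requested as an inlined @Cached node.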
@GeneratedBy(WriteBarrierNode.class)
@DenyReplace
private static final class Inlined extends WriteBarrierNode implements UnadoptableNode {
/**
* State Info:
* 0: SpecializationActive {@link WriteBarrierNode#noWriteBarrier}
* 1: SpecializationActive {@link WriteBarrierNode#alreadySharedCached}
* 2: SpecializationActive {@link WriteBarrierNode#alreadySharedUncached}
* 3: SpecializationActive {@link WriteBarrierNode#writeBarrierCached}
* 4: SpecializationActive {@link WriteBarrierNode#writeBarrierUncached}
* 5: SpecializationActive {@link WriteBarrierNode#updateShapeAndWriteBarrier}
* 6: SpecializationActive {@link WriteBarrierNode#writeBarrierFinalizer}
*
*/
private final StateField state_0_;
private final ReferenceField<AlreadySharedCachedData> alreadySharedCached_cache;
private final ReferenceField<WriteBarrierCachedData> writeBarrierCached_cache;
private final ReferenceField<WriteBarrierNode> updateShapeAndWriteBarrier_writeBarrierNode_;
@SuppressWarnings("unchecked")
private Inlined(InlineTarget target) {
assert target.getTargetClass().isAssignableFrom(WriteBarrierNode.class);
this.state_0_ = target.getState(0, 7);
this.alreadySharedCached_cache = target.getReference(1, AlreadySharedCachedData.class);
this.writeBarrierCached_cache = target.getReference(2, WriteBarrierCachedData.class);
this.updateShapeAndWriteBarrier_writeBarrierNode_ = target.getReference(3, WriteBarrierNode.class);
}
@Override
protected void executeInternal(Node arg0Value, Object arg1Value, int arg2Value) {
int state_0 = this.state_0_.get(arg0Value);
if (state_0 != 0 /* is SpecializationActive[WriteBarrierNode.noWriteBarrier(Node, Object, int)] || SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] || SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] || SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] || SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] || SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] || SpecializationActive[WriteBarrierNode.writeBarrierFinalizer(Node, FinalizerReference, int)] */) {
if ((state_0 & 0b1) != 0 /* is SpecializationActive[WriteBarrierNode.noWriteBarrier(Node, Object, int)] */) {
if ((!(RubyGuards.isRubyDynamicObject(arg1Value))) && (!(WriteBarrierNode.isFinalizer(arg1Value)))) {
WriteBarrierNode.noWriteBarrier(arg0Value, arg1Value, arg2Value);
return;
}
}
if ((state_0 & 0b111110) != 0 /* is SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] || SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] || SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] || SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] || SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */ && arg1Value instanceof RubyDynamicObject) {
RubyDynamicObject arg1Value_ = (RubyDynamicObject) arg1Value;
if ((state_0 & 0b10) != 0 /* is SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] */) {
AlreadySharedCachedData s1_ = this.alreadySharedCached_cache.get(arg0Value);
if (s1_ != null) {
if ((arg1Value_.getShape() == s1_.cachedShape_)) {
assert DSLSupport.assertIdempotence((s1_.cachedShape_.isShared()));
WriteBarrierNode.alreadySharedCached(arg1Value_, arg2Value, s1_.cachedShape_);
return;
}
}
}
if ((state_0 & 0b100) != 0 /* is SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] */) {
if ((arg1Value_.getShape().isShared())) {
WriteBarrierNode.alreadySharedUncached(arg1Value_, arg2Value);
return;
}
}
if ((state_0 & 0b1000) != 0 /* is SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] */) {
WriteBarrierCachedData s3_ = this.writeBarrierCached_cache.get(arg0Value);
if (s3_ != null) {
if (!Assumption.isValidAssumption(s3_.assumption0_)) {
CompilerDirectives.transferToInterpreterAndInvalidate();
removeWriteBarrierCached_(arg0Value, s3_);
executeAndSpecialize(arg0Value, arg1Value_, arg2Value);
return;
}
if ((arg2Value < WriteBarrierNode.MAX_DEPTH) && (arg1Value_.getShape() == s3_.cachedShape_)) {
assert DSLSupport.assertIdempotence((!(s3_.cachedShape_.isShared())));
WriteBarrierNode.writeBarrierCached(arg0Value, arg1Value_, arg2Value, s3_.cachedShape_, s3_.shareObjectNode_);
return;
}
}
}
if ((state_0 & 0b100000) != 0 /* is SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] */) {
{
WriteBarrierNode writeBarrierNode__ = this.updateShapeAndWriteBarrier_writeBarrierNode_.get(arg0Value);
if (writeBarrierNode__ != null) {
if ((ShapeCachingGuards.updateShape(arg1Value_))) {
WriteBarrierNode.updateShapeAndWriteBarrier(arg1Value_, arg2Value, writeBarrierNode__);
return;
}
}
}
}
if ((state_0 & 0b10000) != 0 /* is SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */) {
if ((!(arg1Value_.getShape().isShared()))) {
WriteBarrierNode.writeBarrierUncached(arg0Value, arg1Value_, arg2Value);
return;
}
}
}
if ((state_0 & 0b1000000) != 0 /* is SpecializationActive[WriteBarrierNode.writeBarrierFinalizer(Node, FinalizerReference, int)] */ && arg1Value instanceof FinalizerReference) {
FinalizerReference arg1Value_ = (FinalizerReference) arg1Value;
WriteBarrierNode.writeBarrierFinalizer(arg0Value, arg1Value_, arg2Value);
return;
}
}
CompilerDirectives.transferToInterpreterAndInvalidate();
executeAndSpecialize(arg0Value, arg1Value, arg2Value);
return;
}
@SuppressWarnings("unused")
private void executeAndSpecialize(Node arg0Value, Object arg1Value, int arg2Value) {
int state_0 = this.state_0_.get(arg0Value);
int oldState_0 = state_0;
try {
if ((!(RubyGuards.isRubyDynamicObject(arg1Value))) && (!(WriteBarrierNode.isFinalizer(arg1Value)))) {
state_0 = state_0 | 0b1 /* add SpecializationActive[WriteBarrierNode.noWriteBarrier(Node, Object, int)] */;
this.state_0_.set(arg0Value, state_0);
WriteBarrierNode.noWriteBarrier(arg0Value, arg1Value, arg2Value);
return;
}
if (arg1Value instanceof RubyDynamicObject) {
RubyDynamicObject arg1Value_ = (RubyDynamicObject) arg1Value;
if (((state_0 & 0b100)) == 0 /* is-not SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] */) {
while (true) {
int count1_ = 0;
AlreadySharedCachedData s1_ = this.alreadySharedCached_cache.getVolatile(arg0Value);
AlreadySharedCachedData s1_original = s1_;
while (s1_ != null) {
if ((arg1Value_.getShape() == s1_.cachedShape_)) {
assert DSLSupport.assertIdempotence((s1_.cachedShape_.isShared()));
break;
}
count1_++;
s1_ = null;
break;
}
if (s1_ == null && count1_ < 1) {
{
Shape cachedShape__ = (arg1Value_.getShape());
if ((arg1Value_.getShape() == cachedShape__) && (cachedShape__.isShared())) {
s1_ = new AlreadySharedCachedData();
Objects.requireNonNull(cachedShape__, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
s1_.cachedShape_ = cachedShape__;
if (!this.alreadySharedCached_cache.compareAndSet(arg0Value, s1_original, s1_)) {
continue;
}
state_0 = state_0 | 0b10 /* add SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] */;
this.state_0_.set(arg0Value, state_0);
}
}
}
if (s1_ != null) {
WriteBarrierNode.alreadySharedCached(arg1Value_, arg2Value, s1_.cachedShape_);
return;
}
break;
}
}
if ((arg1Value_.getShape().isShared())) {
this.alreadySharedCached_cache.set(arg0Value, null);
state_0 = state_0 & 0xfffffffd /* remove SpecializationActive[WriteBarrierNode.alreadySharedCached(RubyDynamicObject, int, Shape)] */;
state_0 = state_0 | 0b100 /* add SpecializationActive[WriteBarrierNode.alreadySharedUncached(RubyDynamicObject, int)] */;
this.state_0_.set(arg0Value, state_0);
WriteBarrierNode.alreadySharedUncached(arg1Value_, arg2Value);
return;
}
if (((state_0 & 0b10000)) == 0 /* is-not SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */) {
while (true) {
int count3_ = 0;
WriteBarrierCachedData s3_ = this.writeBarrierCached_cache.getVolatile(arg0Value);
WriteBarrierCachedData s3_original = s3_;
while (s3_ != null) {
if ((arg2Value < WriteBarrierNode.MAX_DEPTH) && (arg1Value_.getShape() == s3_.cachedShape_)) {
assert DSLSupport.assertIdempotence((!(s3_.cachedShape_.isShared())));
if (Assumption.isValidAssumption(s3_.assumption0_)) {
break;
}
}
count3_++;
s3_ = null;
break;
}
if (s3_ == null && count3_ < 1) {
if ((arg2Value < WriteBarrierNode.MAX_DEPTH)) {
Shape cachedShape__1 = (arg1Value_.getShape());
if ((arg1Value_.getShape() == cachedShape__1) && (!(cachedShape__1.isShared()))) {
Assumption assumption0 = (cachedShape__1.getValidAssumption());
if (Assumption.isValidAssumption(assumption0)) {
s3_ = arg0Value.insert(new WriteBarrierCachedData());
Objects.requireNonNull(cachedShape__1, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
s3_.cachedShape_ = cachedShape__1;
ShareObjectNode shareObjectNode__ = s3_.insert((ShareObjectNodeGen.create()));
Objects.requireNonNull(shareObjectNode__, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
s3_.shareObjectNode_ = shareObjectNode__;
s3_.assumption0_ = assumption0;
if (!this.writeBarrierCached_cache.compareAndSet(arg0Value, s3_original, s3_)) {
continue;
}
state_0 = state_0 | 0b1000 /* add SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] */;
this.state_0_.set(arg0Value, state_0);
}
}
}
}
if (s3_ != null) {
WriteBarrierNode.writeBarrierCached(arg0Value, arg1Value_, arg2Value, s3_.cachedShape_, s3_.shareObjectNode_);
return;
}
break;
}
}
if (((state_0 & 0b10000)) == 0 /* is-not SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */) {
if ((ShapeCachingGuards.updateShape(arg1Value_))) {
WriteBarrierNode writeBarrierNode__ = arg0Value.insert((WriteBarrierNodeGen.create()));
Objects.requireNonNull(writeBarrierNode__, "A specialization cache returned a default value. The cache initializer must never return a default value for this cache. Use @Cached(neverDefault=false) to allow default values for this cached value or make sure the cache initializer never returns the default value.");
VarHandle.storeStoreFence();
this.updateShapeAndWriteBarrier_writeBarrierNode_.set(arg0Value, writeBarrierNode__);
state_0 = state_0 | 0b100000 /* add SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] */;
this.state_0_.set(arg0Value, state_0);
WriteBarrierNode.updateShapeAndWriteBarrier(arg1Value_, arg2Value, writeBarrierNode__);
return;
}
}
if ((!(arg1Value_.getShape().isShared()))) {
this.writeBarrierCached_cache.set(arg0Value, null);
this.updateShapeAndWriteBarrier_writeBarrierNode_.set(arg0Value, null);
state_0 = state_0 & 0xffffffd7 /* remove SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)], SpecializationActive[WriteBarrierNode.updateShapeAndWriteBarrier(RubyDynamicObject, int, WriteBarrierNode)] */;
state_0 = state_0 | 0b10000 /* add SpecializationActive[WriteBarrierNode.writeBarrierUncached(Node, RubyDynamicObject, int)] */;
this.state_0_.set(arg0Value, state_0);
WriteBarrierNode.writeBarrierUncached(arg0Value, arg1Value_, arg2Value);
return;
}
}
if (arg1Value instanceof FinalizerReference) {
FinalizerReference arg1Value_ = (FinalizerReference) arg1Value;
state_0 = state_0 | 0b1000000 /* add SpecializationActive[WriteBarrierNode.writeBarrierFinalizer(Node, FinalizerReference, int)] */;
this.state_0_.set(arg0Value, state_0);
WriteBarrierNode.writeBarrierFinalizer(arg0Value, arg1Value_, arg2Value);
return;
}
throw newUnsupportedSpecializationException3LLI(this, arg0Value, arg1Value, arg2Value);
} finally {
if (oldState_0 != 0) {
checkForPolymorphicSpecialize(arg0Value, oldState_0);
}
}
}
private void checkForPolymorphicSpecialize(Node arg0Value, int oldState_0) {
int state_0 = this.state_0_.get(arg0Value);
int newState_0 = state_0;
if (((oldState_0 ^ newState_0) != 0)) {
arg0Value.reportPolymorphicSpecialize();
}
}
void removeWriteBarrierCached_(Node arg0Value, WriteBarrierCachedData s3_) {
this.state_0_.set(arg0Value, state_0_.get(arg0Value) & 0xfffffff7 /* remove SpecializationActive[WriteBarrierNode.writeBarrierCached(Node, RubyDynamicObject, int, Shape, ShareObjectNode)] */);
this.writeBarrierCached_cache.set(arg0Value, null);
}
}
@GeneratedBy(WriteBarrierNode.class)
@DenyReplace
private static final class AlreadySharedCachedData implements SpecializationDataNode {
/**
* Source Info:
* Specialization: {@link WriteBarrierNode#alreadySharedCached}
* Parameter: {@link Shape} cachedShape
*/
@CompilationFinal Shape cachedShape_;
AlreadySharedCachedData() {
}
}
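// Note: AlreadySharedCachedData is a plain heap object rather than a Node because it
// only pins an immutable Shape and needs no adoption into the AST. Entries are
// published via the CAS updater declared at the top of the class, and cachedShape_
// is @CompilationFinal so the shape identity check compiles to a constant compare.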
@GeneratedBy(WriteBarrierNode.class)
@DenyReplace
private static final class WriteBarrierCachedData extends Node implements SpecializationDataNode {
/**
* Source Info:
* Specialization: {@link WriteBarrierNode#writeBarrierCached}
* Parameter: {@link Shape} cachedShape
*/
@CompilationFinal Shape cachedShape_;
/**
* Source Info:
* Specialization: {@link WriteBarrierNode#writeBarrierCached}
* Parameter: {@link ShareObjectNode} shareObjectNode
*/
@Child ShareObjectNode shareObjectNode_;
@CompilationFinal Assumption assumption0_;
WriteBarrierCachedData() {
}
}
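// Note: WriteBarrierCachedData extends Node so that the shareObjectNode_ child can
// be properly adopted into the AST via insert(). assumption0_ caches the shape's
// validity assumption; once it is invalidated, executeInternal removes this entry
// through removeWriteBarrierCached_ and respecializes.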
@GeneratedBy(WriteBarrierNode.class)
@DenyReplace
private static final class Uncached extends WriteBarrierNode implements UnadoptableNode {
@TruffleBoundary
@Override
protected void executeInternal(Node arg0Value, Object arg1Value, int arg2Value) {
if ((!(RubyGuards.isRubyDynamicObject(arg1Value))) && (!(WriteBarrierNode.isFinalizer(arg1Value)))) {
WriteBarrierNode.noWriteBarrier(arg0Value, arg1Value, arg2Value);
return;
}
if (arg1Value instanceof RubyDynamicObject) {
RubyDynamicObject arg1Value_ = (RubyDynamicObject) arg1Value;
if ((arg1Value_.getShape().isShared())) {
WriteBarrierNode.alreadySharedUncached(arg1Value_, arg2Value);
return;
}
if ((!(arg1Value_.getShape().isShared()))) {
WriteBarrierNode.writeBarrierUncached(arg0Value, arg1Value_, arg2Value);
return;
}
}
if (arg1Value instanceof FinalizerReference) {
FinalizerReference arg1Value_ = (FinalizerReference) arg1Value;
WriteBarrierNode.writeBarrierFinalizer(arg0Value, arg1Value_, arg2Value);
return;
}
throw newUnsupportedSpecializationException3LLI(this, arg0Value, arg1Value, arg2Value);
}
}
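// Note: Uncached performs no caching at all: its executeInternal is a
// @TruffleBoundary that re-evaluates every guard on each call, covering the
// uncached subset of the specializations above (noWriteBarrier,
// alreadySharedUncached, writeBarrierUncached, writeBarrierFinalizer). It is
// unadoptable and shared via the UNCACHED singleton returned by getUncached().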
}