8319972: [lworld+vector] Enable intrinsification of Unsafe.finishPrivateBuffer.

Reviewed-by: xgong
Jatin Bhateja committed Nov 22, 2023
1 parent 89a8655 commit 7df90df
Showing 18 changed files with 109 additions and 121 deletions.
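
For context, a minimal Java sketch of the private-buffer protocol whose finishPrivateBuffer leg this commit intrinsifies. The value class Point, the field-offset lookups, and the class names are hypothetical illustrations (Valhalla early-access value-class syntax), not code from this commit; the later sketches below reuse this Point.

import jdk.internal.misc.Unsafe;

// Hypothetical value class, used only for illustration.
value class Point {
    int x;
    int y;
    Point(int x, int y) { this.x = x; this.y = y; }
}

class PrivateBufferDemo {
    static final Unsafe U = Unsafe.getUnsafe();

    // Functional update of an immutable value object via a private buffer.
    static Point withX(Point p, int newX) {
        // makePrivateBuffer returns a buffered copy in "larval" state,
        // the only state in which a value object may be mutated.
        Point larval = U.makePrivateBuffer(p);
        long xOffset = U.objectFieldOffset(Point.class, "x");
        U.putInt(larval, xOffset, newX);
        // finishPrivateBuffer clears the larval bit in the mark word and
        // publishes the buffer as an ordinary immutable value instance.
        return U.finishPrivateBuffer(larval);
    }
}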
13 changes: 11 additions & 2 deletions src/hotspot/share/opto/graphKit.cpp
@@ -54,6 +54,7 @@
#include "utilities/bitMap.inline.hpp"
#include "utilities/powerOfTwo.hpp"
#include "utilities/growableArray.hpp"
#include "classfile/vmSymbols.hpp"

//----------------------------GraphKit-----------------------------------------
// Main utility constructor.
@@ -1863,7 +1864,6 @@ void GraphKit::set_arguments_for_java_call(CallJavaNode* call, bool is_late_inli
if (t->is_inlinetypeptr() && !call->method()->get_Method()->mismatch() && call->method()->is_scalarized_arg(arg_num)) {
// We don't pass inline type arguments by reference but instead pass each field of the inline type
if (!arg->is_InlineType()) {
assert(_gvn.type(arg)->is_zero_type() && !t->inline_klass()->is_null_free(), "Unexpected argument type");
arg = InlineTypeNode::make_from_oop(this, arg, t->inline_klass(), t->inline_klass()->is_null_free());
}
InlineTypeNode* vt = arg->as_InlineType();
@@ -1956,6 +1956,15 @@ Node* GraphKit::set_results_for_java_call(CallJavaNode* call, bool separate_io_p
if (type->is_inlinetypeptr()) {
ret = InlineTypeNode::make_from_oop(this, ret, type->inline_klass(), type->inline_klass()->is_null_free());
}
Node* receiver = !call->method()->is_static() ? call->in(TypeFunc::Parms) : nullptr;
if (ret->is_InlineType() &&
receiver && receiver->bottom_type()->isa_instptr() &&
receiver->bottom_type()->is_instptr()->instance_klass()->name()->get_symbol() == vmSymbols::jdk_internal_misc_Unsafe() &&
call->method()->name()->get_symbol() == vmSymbols::makePrivateBuffer_name()) {
// Re-buffer scalarized InlineTypeNodes returned from makePrivateBuffer
// and transition the allocation into larval state.
ret = ret->as_InlineType()->make_larval(this);
}
}
}

@@ -3416,7 +3425,7 @@ Node* GraphKit::gen_checkcast(Node *obj, Node* superklass, Node* *failure_contro
assert(safe_for_replace, "must be");
obj = null_check(obj);
}
assert(stopped() || !toop->is_inlinetypeptr() || obj->is_InlineType(), "should have been scalarized");
assert(stopped() || !toop->is_inlinetypeptr() || obj->is_InlineType() || obj->bottom_type()->is_inlinetypeptr(), "should have been scalarized");
return obj;
case Compile::SSC_always_false:
if (null_free) {
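The set_results_for_java_call() hunk above covers the case where makePrivateBuffer is compiled as an ordinary Java call rather than an intrinsic: its scalarized return value is re-buffered and the allocation is marked larval so the caller's unsafe stores stay legal. A hedged sketch of the copy semantics involved, reusing the hypothetical Point from the first example:

import jdk.internal.misc.Unsafe;

class CopySemanticsDemo {
    static final Unsafe U = Unsafe.getUnsafe();

    static void demo() {
        long xOffset = U.objectFieldOffset(Point.class, "x");
        Point p = new Point(1, 2);
        Point larval = U.makePrivateBuffer(p); // buffered, mutable copy of p
        U.putInt(larval, xOffset, 99);
        Point q = U.finishPrivateBuffer(larval);
        // p is untouched (p.x == 1) while q.x == 99: the window between the
        // two calls is modeled by a larval-state allocation, which the hunk
        // above re-establishes when the call is not intrinsified.
    }
}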
48 changes: 14 additions & 34 deletions src/hotspot/share/opto/inlinetypenode.cpp
@@ -797,7 +797,7 @@ Node* InlineTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
// but later decide to inline the call after the callee code also triggered allocation.
for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
AllocateNode* alloc = fast_out(i)->isa_Allocate();
if (alloc != nullptr && alloc->in(AllocateNode::InlineType) == this && !alloc->_is_scalar_replaceable) {
if (alloc != nullptr && alloc->in(AllocateNode::InlineType) == this && !alloc->_is_scalar_replaceable && !alloc->_larval) {
// Found a re-allocation
Node* res = alloc->result_cast();
if (res != nullptr && res->is_CheckCastPP()) {
@@ -1009,34 +1009,22 @@ InlineTypeNode* InlineTypeNode::make_from_multi(GraphKit* kit, MultiNode* multi,
return kit->gvn().transform(vt)->as_InlineType();
}

InlineTypeNode* InlineTypeNode::make_larval(GraphKit* kit, bool allocate) const {
Node* InlineTypeNode::make_larval(GraphKit* kit) const {
ciInlineKlass* vk = inline_klass();
InlineTypeNode* res = make_uninitialized(kit->gvn(), vk);
for (uint i = 1; i < req(); ++i) {
res->set_req(i, in(i));
}

if (allocate) {
// Re-execute if buffering triggers deoptimization
PreserveReexecuteState preexecs(kit);
kit->jvms()->set_should_reexecute(true);
Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
Node* alloc_oop = kit->new_instance(klass_node, nullptr, nullptr, true);
AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop);
alloc->_larval = true;
// Re-execute if buffering triggers deoptimization
PreserveReexecuteState preexecs(kit);
kit->jvms()->set_should_reexecute(true);
Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
Node* alloc_oop = kit->new_instance(klass_node, nullptr, nullptr, true);
AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop);
alloc->_larval = true;

store(kit, alloc_oop, alloc_oop, vk);
res->set_oop(alloc_oop);
}
// TODO 8239003
//res->set_type(TypeInlineType::make(vk, true));
res = kit->gvn().transform(res)->as_InlineType();
assert(!allocate || res->is_allocated(&kit->gvn()), "must be allocated");
return res;
store(kit, alloc_oop, alloc_oop, vk);
return alloc_oop;
}

InlineTypeNode* InlineTypeNode::finish_larval(GraphKit* kit) const {
Node* obj = get_oop();
InlineTypeNode* InlineTypeNode::finish_larval(GraphKit* kit, Node* obj, const TypeInstPtr* vk) {
Node* mark_addr = kit->basic_plus_adr(obj, oopDesc::mark_offset_in_bytes());
Node* mark = kit->make_load(nullptr, mark_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
mark = kit->gvn().transform(new AndXNode(mark, kit->MakeConX(~markWord::larval_bit_in_place)));
@@ -1048,15 +1036,7 @@ InlineTypeNode* InlineTypeNode::finish_larval(GraphKit* kit) const {
assert(alloc != nullptr, "must have an allocation node");
kit->insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));

ciInlineKlass* vk = inline_klass();
InlineTypeNode* res = make_uninitialized(kit->gvn(), vk);
for (uint i = 1; i < req(); ++i) {
res->set_req(i, in(i));
}
// TODO 8239003
//res->set_type(TypeInlineType::make(vk, false));
res = kit->gvn().transform(res)->as_InlineType();
return res;
return InlineTypeNode::make_from_oop(kit, obj, vk->inline_klass(), !vk->maybe_null());
}

bool InlineTypeNode::is_larval(PhaseGVN* gvn) const {
@@ -1281,7 +1261,7 @@ void InlineTypeNode::remove_redundant_allocations(PhaseIdealLoop* phase) {
// will be removed anyway and changing the memory chain will confuse other optimizations.
for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
AllocateNode* alloc = fast_out(i)->isa_Allocate();
if (alloc != nullptr && alloc->in(AllocateNode::InlineType) == this && !alloc->_is_scalar_replaceable) {
if (alloc != nullptr && alloc->in(AllocateNode::InlineType) == this && !alloc->_is_scalar_replaceable && !alloc->_larval) {
Node* res = alloc->result_cast();
if (res == nullptr || !res->is_CheckCastPP()) {
break; // No unique CheckCastPP
4 changes: 2 additions & 2 deletions src/hotspot/share/opto/inlinetypenode.hpp
@@ -164,8 +164,8 @@ class InlineTypeNode : public TypeNode {
// Pass inline type as fields at a call or return
void pass_fields(GraphKit* kit, Node* n, uint& base_input, bool in, bool null_free = true);

InlineTypeNode* make_larval(GraphKit* kit, bool allocate) const;
InlineTypeNode* finish_larval(GraphKit* kit) const;
Node* make_larval(GraphKit* kit) const;
static InlineTypeNode* finish_larval(GraphKit* kit, Node* obj, const TypeInstPtr* vk);

// Allocation optimizations
void remove_redundant_allocations(PhaseIdealLoop* phase);
49 changes: 24 additions & 25 deletions src/hotspot/share/opto/library_call.cpp
@@ -2329,6 +2329,17 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
assert(Unsafe_field_offset_to_byte_offset(11) == 11,
"fieldOffset must be byte-scaled");

if (_gvn.type(base)->is_inlinetypeptr() && is_store) {
// FIXME: The larval bit check is needed to preserve the semantics of value
// objects, which can be mutated only if their _larval bit is set. Since
// the oop is not always an AllocateNode, we have to find a utility way
// to check the larval state for all kinds of oops.
AllocateNode* alloc = AllocateNode::Ideal_allocation(base);
if (alloc != nullptr) {
assert(alloc->_larval, "InlineType instance must be in _larval state for unsafe put operation.\n");
}
}

ciInlineKlass* inline_klass = nullptr;
if (type == T_PRIMITIVE_OBJECT) {
const TypeInstPtr* cls = _gvn.type(argument(4))->isa_instptr();
@@ -2349,14 +2360,6 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
return false;
}
base = vt->get_oop();
// FIXME: Larval bit check is needed to preserve the semantics of value
// objects which can be mutated only if its _larval bit is set. Since
// the oop is not always an AllocateNode, we have to find an utility way
// to check the larval state for all kind of oops.
AllocateNode* alloc = AllocateNode::Ideal_allocation(base);
if (alloc != nullptr) {
assert(alloc->_larval, "InlineType instance must be in _larval state for unsafe put operation.\n");
}
} else {
if (offset->is_Con()) {
long off = find_long_con(offset, 0);
@@ -2627,11 +2630,6 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
}
}

if (argument(1)->is_InlineType() && is_store) {
InlineTypeNode* value = InlineTypeNode::make_from_oop(this, base, _gvn.type(argument(1))->inline_klass());
value = value->make_larval(this, false);
replace_in_map(argument(1), value);
}

return true;
}
@@ -2648,31 +2646,32 @@ bool LibraryCallKit::inline_unsafe_make_private_buffer() {
return true;
}

set_result(value->as_InlineType()->make_larval(this, true));
set_result(value->as_InlineType()->make_larval(this));
return true;
}

bool LibraryCallKit::inline_unsafe_finish_private_buffer() {
Node* receiver = argument(0);
Node* buffer = argument(1);
if (!buffer->is_InlineType()) {
return false;
}
InlineTypeNode* vt = buffer->as_InlineType();
if (!vt->is_allocated(&_gvn) || VectorSupport::is_vector_payload_mf(vt->inline_klass()->get_InlineKlass())) {
return false;

// The incoming value should be a buffered inline type oop, not an InlineTypeNode.
if (buffer->is_InlineType() || !buffer->bottom_type()->is_inlinetypeptr()) {
return false;
}
// TODO 8239003 Why is this needed?
if (AllocateNode::Ideal_allocation(vt->get_oop()) == nullptr) {

// An allocation node must exist to generate the IR that transitions the
// allocation out of the larval state. If the allocation is not reachable,
// disable the intrinsic and take the Unsafe slow path; the oop returned by
// the Unsafe_finishPrivateBuffer native method will automatically be
// rematerialized as an InlineTypeNode.
if (AllocateNode::Ideal_allocation(buffer) == nullptr) {
return false;
}

receiver = null_check(receiver);
if (stopped()) {
return true;
}

set_result(vt->finish_larval(this));
const TypeInstPtr* ptr = buffer->bottom_type()->isa_instptr();
set_result(InlineTypeNode::finish_larval(this, buffer, ptr));
return true;
}

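A hedged sketch of the bail-out the comment above describes, reusing the hypothetical Point from the first example; hide() stands in for any call the JIT leaves uninlined, so the compiler cannot see the buffer's allocation and the intrinsic declines in favor of the native slow path:

import jdk.internal.misc.Unsafe;

class SlowPathDemo {
    static final Unsafe U = Unsafe.getUnsafe();

    // Stand-in for any method the JIT does not inline: past this call the
    // compiler no longer sees the AllocateNode behind the buffer.
    static Point hide(Point p) { return p; }

    static Point update(Point p, int newY) {
        long yOffset = U.objectFieldOffset(Point.class, "y");
        Point larval = U.makePrivateBuffer(p);
        U.putInt(larval, yOffset, newY);
        // AllocateNode::Ideal_allocation() finds no allocation behind the
        // value returned by hide(), so inline_unsafe_finish_private_buffer()
        // returns false and the Unsafe_finishPrivateBuffer native entry
        // performs the larval-to-final transition instead.
        return U.finishPrivateBuffer(hide(larval));
    }
}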
5 changes: 4 additions & 1 deletion src/hotspot/share/opto/macro.cpp
@@ -958,7 +958,10 @@ bool PhaseMacroExpand::scalar_replacement(AllocateNode *alloc, GrowableArray <Sa
if (res != nullptr) { // Could be null when there are no users
res_type = _igvn.type(res)->isa_oopptr();
}

// Buffers in larval state should not be scalarized.
if (alloc->_larval) {
return false;
}
// Process the safepoint uses
assert(safepoints.length() == 0 || !res_type->is_inlinetypeptr(), "Inline type allocations should not have safepoint uses");
Unique_Node_List value_worklist;
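A hedged Java-level intuition for the new _larval guard, again reusing the hypothetical Point: finishPrivateBuffer must clear a bit in the real object's mark word, so a larval buffer has to stay materialized instead of being dissolved into its fields.

import jdk.internal.misc.Unsafe;

class LarvalEscapeDemo {
    static final Unsafe U = Unsafe.getUnsafe();

    static Point build() {
        long xOffset = U.objectFieldOffset(Point.class, "x");
        // The buffer is a real heap allocation whose mark word has the
        // larval bit set.
        Point larval = U.makePrivateBuffer(new Point(0, 0));
        U.putInt(larval, xOffset, 7);
        // If scalar replacement dissolved the buffer into plain ints here,
        // no mark word would remain for finishPrivateBuffer to update; the
        // new `if (alloc->_larval) return false;` guard prevents that.
        return U.finishPrivateBuffer(larval);
    }
}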
13 changes: 8 additions & 5 deletions src/hotspot/share/opto/parse1.cpp
@@ -1777,9 +1777,12 @@ void Parse::merge_common(Parse::Block* target, int pnum) {
// Allocate inline type in src block to be able to merge it with oop in target block
map()->set_req(j, n->as_InlineType()->buffer(this));
} else if (!n->is_InlineType() && t->is_inlinetypeptr()) {
// Scalarize null in src block to be able to merge it with inline type in target block
assert(gvn().type(n)->is_zero_type(), "Should have been scalarized");
map()->set_req(j, InlineTypeNode::make_null(gvn(), t->inline_klass()));
AllocateNode* alloc = AllocateNode::Ideal_allocation(n);
if (alloc == nullptr || !alloc->_larval) {
// Scalarize null in src block to be able to merge it with inline type in target block
assert(gvn().type(n)->is_zero_type(), "Should have been scalarized");
map()->set_req(j, InlineTypeNode::make_null(gvn(), t->inline_klass()));
}
}
}
}
@@ -1884,7 +1887,7 @@ void Parse::merge_common(Parse::Block* target, int pnum) {
PhiNode* phi;
if (m->is_Phi() && m->as_Phi()->region() == r) {
phi = m->as_Phi();
} else if (m->is_InlineType() && m->as_InlineType()->has_phi_inputs(r)) {
} else if (m->is_InlineType() && n->is_InlineType() && m->as_InlineType()->has_phi_inputs(r)) {
phi = m->as_InlineType()->get_oop()->as_Phi();
} else {
phi = nullptr;
@@ -1923,7 +1926,7 @@
// It is a bug if we create a phi which sees a garbage value on a live path.

// Merging two inline types?
if (phi != nullptr && phi->bottom_type()->is_inlinetypeptr()) {
if (phi != nullptr && phi->bottom_type()->is_inlinetypeptr() && m->is_InlineType() && n->is_InlineType()) {
// Reload current state because it may have been updated by ensure_phi
m = map()->in(j);
InlineTypeNode* vtm = m->as_InlineType(); // Current inline type
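One shape the new merge_common() guard handles, sketched against the same hypothetical Point: the conditional creates basic blocks inside the larval window, so the larval buffer oop is live across a merge whose slot type is an inline type pointer, and the parser must keep the buffer rather than asserting and scalarizing a null.

import jdk.internal.misc.Unsafe;

class LarvalMergeDemo {
    static final Unsafe U = Unsafe.getUnsafe();

    static Point demo(Point p, boolean cond) {
        long xOffset = U.objectFieldOffset(Point.class, "x");
        Point larval = U.makePrivateBuffer(p);
        // The ternary splits parsing into blocks while `larval` -- a buffered
        // oop in larval state, not an InlineTypeNode -- stays live across the
        // merge point.
        U.putInt(larval, xOffset, cond ? 5 : 9);
        return U.finishPrivateBuffer(larval);
    }
}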