// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//
#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
#include "src/objects.h"
#include "src/base/bits.h"
#include "src/base/tsan.h"
#include "src/builtins/builtins.h"
#include "src/contexts-inl.h"
#include "src/conversions-inl.h"
#include "src/feedback-vector-inl.h"
#include "src/field-index-inl.h"
#include "src/handles-inl.h"
#include "src/heap/factory.h"
#include "src/heap/heap-inl.h" // crbug.com/v8/8499
#include "src/isolate-inl.h"
#include "src/keys.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup-cache-inl.h"
#include "src/lookup-inl.h"
#include "src/maybe-handles-inl.h"
#include "src/objects/bigint.h"
#include "src/objects/descriptor-array-inl.h"
#include "src/objects/embedder-data-array-inl.h"
#include "src/objects/free-space-inl.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/heap-object.h" // TODO(jkummerow): See below [1].
#include "src/objects/js-proxy-inl.h"
#include "src/objects/literal-objects.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/oddball-inl.h"
#include "src/objects/ordered-hash-table-inl.h"
#include "src/objects/regexp-match-info.h"
#include "src/objects/scope-info.h"
#include "src/objects/slots-inl.h"
#include "src/objects/smi-inl.h"
#include "src/objects/template-objects.h"
#include "src/objects/templates.h"
#include "src/property-details.h"
#include "src/property.h"
#include "src/prototype-inl.h"
#include "src/roots-inl.h"
#include "src/transitions-inl.h"
#include "src/v8memory.h"
// [1] This file currently contains the definitions of many
// HeapObject::IsFoo() predicates, which in turn require #including
// many other -inl.h files. Find a way to avoid this. Idea:
// Since e.g. HeapObject::IsSeqString requires things from string-inl.h,
// and presumably is mostly used from places that require/include string-inl.h
// anyway, maybe that's where it should be defined?
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
PropertyDetails::PropertyDetails(Smi smi) { value_ = smi->value(); }
Smi PropertyDetails::AsSmi() const {
// Ensure the upper 2 bits have the same value by sign extending it. This is
// necessary to be able to use the 31st bit of the property details.
int value = value_ << 1;
return Smi::FromInt(value >> 1);
}
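// Illustrative sketch of the shift trick above, with a hypothetical value:
// for value_ == 0x40000000 (only bit 30 set), (value_ << 1) >> 1 shifts the
// sign back in and yields 0xC0000000, so bits 30 and 31 agree and the value
// fits the 31-bit Smi payload; the low 31 bits, which are the ones
// PropertyDetails actually uses, survive the round trip through AsSmi() and
// the PropertyDetails(Smi) constructor.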
int PropertyDetails::field_width_in_words() const {
DCHECK_EQ(location(), kField);
if (!FLAG_unbox_double_fields) return 1;
if (kDoubleSize == kPointerSize) return 1;
return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}
bool HeapObject::IsUncompiledData() const {
return IsUncompiledDataWithoutPreParsedScope() ||
IsUncompiledDataWithPreParsedScope();
}
bool HeapObject::IsSloppyArgumentsElements() const {
return IsFixedArrayExact();
}
bool HeapObject::IsJSSloppyArgumentsObject() const {
return IsJSArgumentsObject();
}
bool HeapObject::IsJSGeneratorObject() const {
return map()->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
IsJSAsyncFunctionObject() || IsJSAsyncGeneratorObject();
}
bool HeapObject::IsDataHandler() const {
return IsLoadHandler() || IsStoreHandler();
}
bool HeapObject::IsClassBoilerplate() const { return IsFixedArrayExact(); }
bool HeapObject::IsExternal(Isolate* isolate) const {
return map()->FindRootMap(isolate) == isolate->heap()->external_map();
}
#define IS_TYPE_FUNCTION_DEF(type_) \
bool Object::Is##type_() const { \
return IsHeapObject() && HeapObject::cast(*this)->Is##type_(); \
}
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
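// As an illustration, a list entry Foo expands via the macro above to:
//   bool Object::IsFoo() const {
//     return IsHeapObject() && HeapObject::cast(*this)->IsFoo();
//   }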
#define IS_TYPE_FUNCTION_DEF(Type, Value) \
bool Object::Is##Type(Isolate* isolate) const { \
return Is##Type(ReadOnlyRoots(isolate->heap())); \
} \
bool Object::Is##Type(ReadOnlyRoots roots) const { \
return *this == roots.Value(); \
} \
bool Object::Is##Type() const { \
return IsHeapObject() && HeapObject::cast(*this)->Is##Type(); \
} \
bool HeapObject::Is##Type(Isolate* isolate) const { \
return Object::Is##Type(isolate); \
} \
bool HeapObject::Is##Type(ReadOnlyRoots roots) const { \
return Object::Is##Type(roots); \
} \
bool HeapObject::Is##Type() const { return Is##Type(GetReadOnlyRoots()); }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
bool Object::IsNullOrUndefined(Isolate* isolate) const {
return IsNullOrUndefined(ReadOnlyRoots(isolate));
}
bool Object::IsNullOrUndefined(ReadOnlyRoots roots) const {
return IsNull(roots) || IsUndefined(roots);
}
bool Object::IsNullOrUndefined() const {
return IsHeapObject() && HeapObject::cast(*this)->IsNullOrUndefined();
}
bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
return Object::IsNullOrUndefined(isolate);
}
bool HeapObject::IsNullOrUndefined(ReadOnlyRoots roots) const {
return Object::IsNullOrUndefined(roots);
}
bool HeapObject::IsNullOrUndefined() const {
return IsNullOrUndefined(GetReadOnlyRoots());
}
bool HeapObject::IsUniqueName() const {
return IsInternalizedString() || IsSymbol();
}
bool HeapObject::IsFunction() const {
STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
return map()->instance_type() >= FIRST_FUNCTION_TYPE;
}
bool HeapObject::IsCallable() const { return map()->is_callable(); }
bool HeapObject::IsConstructor() const { return map()->is_constructor(); }
bool HeapObject::IsModuleInfo() const {
return map() == GetReadOnlyRoots().module_info_map();
}
bool HeapObject::IsTemplateInfo() const {
return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}
bool HeapObject::IsConsString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsCons();
}
bool HeapObject::IsThinString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsThin();
}
bool HeapObject::IsSlicedString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsSliced();
}
bool HeapObject::IsSeqString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsSequential();
}
bool HeapObject::IsSeqOneByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsSequential() &&
String::cast(*this)->IsOneByteRepresentation();
}
bool HeapObject::IsSeqTwoByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsSequential() &&
String::cast(*this)->IsTwoByteRepresentation();
}
bool HeapObject::IsExternalString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsExternal();
}
bool HeapObject::IsExternalOneByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsExternal() &&
String::cast(*this)->IsOneByteRepresentation();
}
bool HeapObject::IsExternalTwoByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsExternal() &&
String::cast(*this)->IsTwoByteRepresentation();
}
bool Object::IsNumber() const { return IsSmi() || IsHeapNumber(); }
bool Object::IsNumeric() const { return IsNumber() || IsBigInt(); }
bool HeapObject::IsFiller() const {
InstanceType instance_type = map()->instance_type();
return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}
bool HeapObject::IsJSWeakCollection() const {
return IsJSWeakMap() || IsJSWeakSet();
}
bool HeapObject::IsJSCollection() const { return IsJSMap() || IsJSSet(); }
bool HeapObject::IsPromiseReactionJobTask() const {
return IsPromiseFulfillReactionJobTask() || IsPromiseRejectReactionJobTask();
}
bool HeapObject::IsEnumCache() const { return IsTuple2(); }
bool HeapObject::IsFrameArray() const { return IsFixedArrayExact(); }
bool HeapObject::IsArrayList() const {
return map() == GetReadOnlyRoots().array_list_map() ||
*this == GetReadOnlyRoots().empty_fixed_array();
}
bool HeapObject::IsRegExpMatchInfo() const { return IsFixedArrayExact(); }
bool Object::IsLayoutDescriptor() const { return IsSmi() || IsByteArray(); }
bool HeapObject::IsDeoptimizationData() const {
// Must be a fixed array.
if (!IsFixedArrayExact()) return false;
  // There's no sure way to tell a plain fixed array apart from a
  // deoptimization data array. Since this is only used for asserts, we check
  // that the length is either zero or the fixed header size plus a multiple
  // of the entry size.
int length = FixedArray::cast(*this)->length();
if (length == 0) return true;
length -= DeoptimizationData::kFirstDeoptEntryIndex;
return length >= 0 && length % DeoptimizationData::kDeoptEntrySize == 0;
}
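// Example of the heuristic above, with hypothetical constants
// kFirstDeoptEntryIndex == 6 and kDeoptEntrySize == 3: lengths 0, 6, 9, 12,
// ... are accepted, while e.g. a length of 7 is rejected.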
bool HeapObject::IsHandlerTable() const {
if (!IsFixedArrayExact()) return false;
// There's actually no way to see the difference between a fixed array and
// a handler table array.
return true;
}
bool HeapObject::IsTemplateList() const {
if (!IsFixedArrayExact()) return false;
// There's actually no way to see the difference between a fixed array and
// a template list.
if (FixedArray::cast(*this)->length() < 1) return false;
return true;
}
bool HeapObject::IsDependentCode() const {
if (!IsWeakFixedArray()) return false;
// There's actually no way to see the difference between a weak fixed array
// and a dependent codes array.
return true;
}
bool HeapObject::IsAbstractCode() const {
return IsBytecodeArray() || IsCode();
}
bool HeapObject::IsStringWrapper() const {
return IsJSValue() && JSValue::cast(*this)->value()->IsString();
}
bool HeapObject::IsBooleanWrapper() const {
return IsJSValue() && JSValue::cast(*this)->value()->IsBoolean();
}
bool HeapObject::IsScriptWrapper() const {
return IsJSValue() && JSValue::cast(*this)->value()->IsScript();
}
bool HeapObject::IsNumberWrapper() const {
return IsJSValue() && JSValue::cast(*this)->value()->IsNumber();
}
bool HeapObject::IsBigIntWrapper() const {
return IsJSValue() && JSValue::cast(*this)->value()->IsBigInt();
}
bool HeapObject::IsSymbolWrapper() const {
return IsJSValue() && JSValue::cast(*this)->value()->IsSymbol();
}
bool HeapObject::IsBoolean() const {
return IsOddball() &&
((Oddball::cast(*this)->kind() & Oddball::kNotBooleanMask) == 0);
}
bool HeapObject::IsJSArrayBufferView() const {
return IsJSDataView() || IsJSTypedArray();
}
bool HeapObject::IsStringSet() const { return IsHashTable(); }
bool HeapObject::IsObjectHashSet() const { return IsHashTable(); }
bool HeapObject::IsNormalizedMapCache() const {
return NormalizedMapCache::IsNormalizedMapCache(*this);
}
bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }
bool HeapObject::IsMapCache() const { return IsHashTable(); }
bool HeapObject::IsObjectHashTable() const { return IsHashTable(); }
bool Object::IsHashTableBase() const { return IsHashTable(); }
bool Object::IsSmallOrderedHashTable() const {
return IsSmallOrderedHashSet() || IsSmallOrderedHashMap() ||
IsSmallOrderedNameDictionary();
}
bool Object::IsPrimitive() const {
return IsSmi() || HeapObject::cast(*this)->map()->IsPrimitiveMap();
}
// static
Maybe<bool> Object::IsArray(Handle<Object> object) {
if (object->IsSmi()) return Just(false);
Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
if (heap_object->IsJSArray()) return Just(true);
if (!heap_object->IsJSProxy()) return Just(false);
return JSProxy::IsArray(Handle<JSProxy>::cast(object));
}
bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }
bool HeapObject::IsAccessCheckNeeded() const {
if (IsJSGlobalProxy()) {
const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
JSGlobalObject global = proxy->GetIsolate()->context()->global_object();
return proxy->IsDetachedFrom(global);
}
return map()->is_access_check_needed();
}
bool HeapObject::IsStruct() const {
switch (map()->instance_type()) {
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
case TYPE: \
return true;
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
// It is hard to include ALLOCATION_SITE_TYPE in STRUCT_LIST because
// that macro is used for many things and AllocationSite needs a few
// special cases.
case ALLOCATION_SITE_TYPE:
return true;
case LOAD_HANDLER_TYPE:
case STORE_HANDLER_TYPE:
return true;
case FEEDBACK_CELL_TYPE:
return true;
case CALL_HANDLER_INFO_TYPE:
return true;
default:
return false;
}
}
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
bool Object::Is##Name() const { \
return IsHeapObject() && HeapObject::cast(*this)->Is##Name(); \
} \
TYPE_CHECKER(Name)
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
double Object::Number() const {
DCHECK(IsNumber());
return IsSmi() ? static_cast<double>(Smi(this->ptr())->value())
: HeapNumber::unchecked_cast(*this)->value();
}
bool Object::IsNaN() const {
return this->IsHeapNumber() && std::isnan(HeapNumber::cast(*this)->value());
}
bool Object::IsMinusZero() const {
return this->IsHeapNumber() &&
i::IsMinusZero(HeapNumber::cast(*this)->value());
}
OBJECT_CONSTRUCTORS_IMPL(HeapObject, ObjectPtr)
OBJECT_CONSTRUCTORS_IMPL(HashTableBase, FixedArray)
template <typename Derived, typename Shape>
HashTable<Derived, Shape>::HashTable(Address ptr) : HashTableBase(ptr) {
SLOW_DCHECK(IsHashTable());
}
template <typename Derived, typename Shape>
ObjectHashTableBase<Derived, Shape>::ObjectHashTableBase(Address ptr)
: HashTable<Derived, Shape>(ptr) {}
ObjectHashTable::ObjectHashTable(Address ptr)
: ObjectHashTableBase<ObjectHashTable, ObjectHashTableShape>(ptr) {
SLOW_DCHECK(IsObjectHashTable());
}
EphemeronHashTable::EphemeronHashTable(Address ptr)
: ObjectHashTableBase<EphemeronHashTable, EphemeronHashTableShape>(ptr) {
SLOW_DCHECK(IsEphemeronHashTable());
}
ObjectHashSet::ObjectHashSet(Address ptr)
: HashTable<ObjectHashSet, ObjectHashSetShape>(ptr) {
SLOW_DCHECK(IsObjectHashSet());
}
OBJECT_CONSTRUCTORS_IMPL(RegExpMatchInfo, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(ScopeInfo, FixedArray)
NormalizedMapCache::NormalizedMapCache(Address ptr) : WeakFixedArray(ptr) {
// TODO(jkummerow): Introduce IsNormalizedMapCache() and use
// OBJECT_CONSTRUCTORS_IMPL macro?
}
OBJECT_CONSTRUCTORS_IMPL(BigIntBase, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(BigInt, BigIntBase)
OBJECT_CONSTRUCTORS_IMPL(FreshlyAllocatedBigInt, BigIntBase)
OBJECT_CONSTRUCTORS_IMPL(TemplateObjectDescription, Tuple2)
// ------------------------------------
// Cast operations
CAST_ACCESSOR2(BigInt)
CAST_ACCESSOR2(ObjectBoilerplateDescription)
CAST_ACCESSOR2(EphemeronHashTable)
CAST_ACCESSOR2(HeapObject)
CAST_ACCESSOR2(NormalizedMapCache)
CAST_ACCESSOR2(Object)
CAST_ACCESSOR2(ObjectHashSet)
CAST_ACCESSOR2(ObjectHashTable)
CAST_ACCESSOR2(RegExpMatchInfo)
CAST_ACCESSOR2(ScopeInfo)
CAST_ACCESSOR2(TemplateObjectDescription)
bool Object::HasValidElements() {
// Dictionary is covered under FixedArray.
return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}
bool Object::KeyEquals(Object second) {
Object first = *this;
if (second->IsNumber()) {
if (first->IsNumber()) return first->Number() == second->Number();
Object temp = first;
first = second;
second = temp;
}
if (first->IsNumber()) {
DCHECK_LE(0, first->Number());
uint32_t expected = static_cast<uint32_t>(first->Number());
uint32_t index;
return Name::cast(second)->AsArrayIndex(&index) && index == expected;
}
return Name::cast(first)->Equals(Name::cast(second));
}
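// For example, comparing Smi key 3 against the string "3": the number is
// normalized into the first position, "3"->AsArrayIndex() yields index 3,
// and the keys compare equal.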
bool Object::FilterKey(PropertyFilter filter) {
DCHECK(!IsPropertyCell());
if (IsSymbol()) {
if (filter & SKIP_SYMBOLS) return true;
if (Symbol::cast(*this)->is_private()) return true;
} else {
if (filter & SKIP_STRINGS) return true;
}
return false;
}
Handle<Object> Object::NewStorageFor(Isolate* isolate, Handle<Object> object,
Representation representation) {
if (!representation.IsDouble()) return object;
auto result = isolate->factory()->NewMutableHeapNumberWithHoleNaN();
if (object->IsUninitialized(isolate)) {
result->set_value_as_bits(kHoleNanInt64);
} else if (object->IsMutableHeapNumber()) {
// Ensure that all bits of the double value are preserved.
result->set_value_as_bits(
MutableHeapNumber::cast(*object)->value_as_bits());
} else {
result->set_value(object->Number());
}
return result;
}
Handle<Object> Object::WrapForRead(Isolate* isolate, Handle<Object> object,
Representation representation) {
DCHECK(!object->IsUninitialized(isolate));
if (!representation.IsDouble()) {
DCHECK(object->FitsRepresentation(representation));
return object;
}
return isolate->factory()->NewHeapNumber(
MutableHeapNumber::cast(*object)->value());
}
Representation Object::OptimalRepresentation() {
if (!FLAG_track_fields) return Representation::Tagged();
if (IsSmi()) {
return Representation::Smi();
} else if (FLAG_track_double_fields && IsHeapNumber()) {
return Representation::Double();
} else if (FLAG_track_computed_fields && IsUninitialized()) {
return Representation::None();
} else if (FLAG_track_heap_object_fields) {
DCHECK(IsHeapObject());
return Representation::HeapObject();
} else {
return Representation::Tagged();
}
}
ElementsKind Object::OptimalElementsKind() {
if (IsSmi()) return PACKED_SMI_ELEMENTS;
if (IsNumber()) return PACKED_DOUBLE_ELEMENTS;
return PACKED_ELEMENTS;
}
bool Object::FitsRepresentation(Representation representation) {
if (FLAG_track_fields && representation.IsSmi()) {
return IsSmi();
} else if (FLAG_track_double_fields && representation.IsDouble()) {
return IsMutableHeapNumber() || IsNumber();
} else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
return IsHeapObject();
} else if (FLAG_track_fields && representation.IsNone()) {
return false;
}
return true;
}
bool Object::ToUint32(uint32_t* value) const {
if (IsSmi()) {
int num = Smi::ToInt(*this);
if (num < 0) return false;
*value = static_cast<uint32_t>(num);
return true;
}
if (IsHeapNumber()) {
double num = HeapNumber::cast(*this)->value();
return DoubleToUint32IfEqualToSelf(num, value);
}
return false;
}
// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
Handle<Object> object,
const char* method_name) {
if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
return ToObject(isolate, object, isolate->native_context(), method_name);
}
// static
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
if (input->IsName()) return Handle<Name>::cast(input);
return ConvertToName(isolate, input);
}
// static
MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
Handle<Object> value) {
if (value->IsSmi() || HeapObject::cast(*value)->IsName()) return value;
return ConvertToPropertyKey(isolate, value);
}
// static
MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
ToPrimitiveHint hint) {
if (input->IsPrimitive()) return input;
return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}
// static
MaybeHandle<Object> Object::ToNumber(Isolate* isolate, Handle<Object> input) {
if (input->IsNumber()) return input; // Shortcut.
return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumber);
}
// static
MaybeHandle<Object> Object::ToNumeric(Isolate* isolate, Handle<Object> input) {
if (input->IsNumber() || input->IsBigInt()) return input; // Shortcut.
return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumeric);
}
// static
MaybeHandle<Object> Object::ToInteger(Isolate* isolate, Handle<Object> input) {
if (input->IsSmi()) return input;
return ConvertToInteger(isolate, input);
}
// static
MaybeHandle<Object> Object::ToInt32(Isolate* isolate, Handle<Object> input) {
if (input->IsSmi()) return input;
return ConvertToInt32(isolate, input);
}
// static
MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
if (input->IsSmi()) return handle(Smi::cast(*input)->ToUint32Smi(), isolate);
return ConvertToUint32(isolate, input);
}
// static
MaybeHandle<String> Object::ToString(Isolate* isolate, Handle<Object> input) {
if (input->IsString()) return Handle<String>::cast(input);
return ConvertToString(isolate, input);
}
// static
MaybeHandle<Object> Object::ToLength(Isolate* isolate, Handle<Object> input) {
if (input->IsSmi()) {
int value = std::max(Smi::ToInt(*input), 0);
return handle(Smi::FromInt(value), isolate);
}
return ConvertToLength(isolate, input);
}
// static
MaybeHandle<Object> Object::ToIndex(Isolate* isolate, Handle<Object> input,
MessageTemplate error_index) {
if (input->IsSmi() && Smi::ToInt(*input) >= 0) return input;
return ConvertToIndex(isolate, input, error_index);
}
MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
Handle<Name> name) {
LookupIterator it(isolate, object, name);
if (!it.IsFound()) return it.factory()->undefined_value();
return GetProperty(&it);
}
MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
uint32_t index) {
LookupIterator it(isolate, object, index);
if (!it.IsFound()) return it.factory()->undefined_value();
return GetProperty(&it);
}
MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
uint32_t index, Handle<Object> value,
LanguageMode language_mode) {
LookupIterator it(isolate, object, index);
MAYBE_RETURN_NULL(
SetProperty(&it, value, language_mode, StoreOrigin::kMaybeKeyed));
return value;
}
ObjectSlot HeapObject::RawField(int byte_offset) const {
return ObjectSlot(FIELD_ADDR(this, byte_offset));
}
ObjectSlot HeapObject::RawField(const HeapObject obj, int byte_offset) {
return ObjectSlot(FIELD_ADDR(obj, byte_offset));
}
MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
return MaybeObjectSlot(FIELD_ADDR(this, byte_offset));
}
MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject obj, int byte_offset) {
return MaybeObjectSlot(FIELD_ADDR(obj, byte_offset));
}
MapWord MapWord::FromMap(const Map map) { return MapWord(map.ptr()); }
Map MapWord::ToMap() const { return Map::unchecked_cast(ObjectPtr(value_)); }
bool MapWord::IsForwardingAddress() const { return HAS_SMI_TAG(value_); }
MapWord MapWord::FromForwardingAddress(HeapObject object) {
return MapWord(object->ptr() - kHeapObjectTag);
}
HeapObject MapWord::ToForwardingAddress() {
DCHECK(IsForwardingAddress());
return HeapObject::FromAddress(value_);
}
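// Note on the encoding above: heap object pointers carry kHeapObjectTag in
// their low bits, so FromForwardingAddress() stores an untagged address whose
// low bits look like a Smi; IsForwardingAddress() keys off exactly that, and
// ToForwardingAddress() re-tags the address via HeapObject::FromAddress().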
#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(Isolate* isolate, int offset) {
VerifyPointer(isolate, READ_FIELD(this, offset));
}
void HeapObject::VerifyMaybeObjectField(Isolate* isolate, int offset) {
MaybeObject::VerifyMaybeObjectPointer(isolate, READ_WEAK_FIELD(this, offset));
}
void HeapObject::VerifySmiField(int offset) {
CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
// TODO(v8:7464): When RO_SPACE is embedded, this will access a global
// variable instead.
return ReadOnlyRoots(MemoryChunk::FromHeapObject(*this)->heap());
}
Heap* NeverReadOnlySpaceObject::GetHeap() const {
MemoryChunk* chunk =
MemoryChunk::FromAddress(reinterpret_cast<Address>(this));
// Make sure we are not accessing an object in RO space.
SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE);
Heap* heap = chunk->heap();
SLOW_DCHECK(heap != nullptr);
return heap;
}
Isolate* NeverReadOnlySpaceObject::GetIsolate() const {
return GetHeap()->isolate();
}
Map HeapObject::map() const { return map_word().ToMap(); }
void HeapObject::set_map(Map value) {
if (!value.is_null()) {
#ifdef VERIFY_HEAP
Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
}
set_map_word(MapWord::FromMap(value));
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(this, ObjectSlot(kNullAddress), value);
}
}
Map HeapObject::synchronized_map() const {
return synchronized_map_word().ToMap();
}
void HeapObject::synchronized_set_map(Map value) {
if (!value.is_null()) {
#ifdef VERIFY_HEAP
Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
}
synchronized_set_map_word(MapWord::FromMap(value));
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(this, ObjectSlot(kNullAddress), value);
}
}
// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map value) {
if (!value.is_null()) {
#ifdef VERIFY_HEAP
Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
}
set_map_word(MapWord::FromMap(value));
}
void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
set_map_word(MapWord::FromMap(value));
if (mode != SKIP_WRITE_BARRIER) {
DCHECK(!value.is_null());
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(this, ObjectSlot(kNullAddress), value);
}
}
MapWordSlot HeapObject::map_slot() const {
return MapWordSlot(FIELD_ADDR(this, kMapOffset));
}
MapWord HeapObject::map_word() const {
return MapWord(map_slot().Relaxed_Load().ptr());
}
void HeapObject::set_map_word(MapWord map_word) {
map_slot().Relaxed_Store(ObjectPtr(map_word.value_));
}
MapWord HeapObject::synchronized_map_word() const {
return MapWord(map_slot().Acquire_Load().ptr());
}
void HeapObject::synchronized_set_map_word(MapWord map_word) {
map_slot().Release_Store(ObjectPtr(map_word.value_));
}
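// The relaxed map-word accessors above cover the common case;
// synchronized_map_word()/synchronized_set_map_word() form an acquire/release
// pair, presumably so that a map published by one thread is observed by
// concurrent readers (e.g. the concurrent marker) only after the fields that
// the map describes have been initialized.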
int HeapObject::Size() const { return SizeFromMap(map()); }
inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
}
// This should be in objects/map-inl.h, but can't, because of a cyclic
// dependency.
bool Map::IsSpecialReceiverMap() const {
bool result = IsSpecialReceiverInstanceType(instance_type());
DCHECK_IMPLIES(!result,
!has_named_interceptor() && !is_access_check_needed());
return result;
}
inline bool IsCustomElementsReceiverInstanceType(InstanceType instance_type) {
return instance_type <= LAST_CUSTOM_ELEMENTS_RECEIVER;
}
// This should be in objects/map-inl.h, but can't, because of a cyclic
// dependency.
bool Map::IsCustomElementsReceiverMap() const {
return IsCustomElementsReceiverInstanceType(instance_type());
}
bool Object::ToArrayLength(uint32_t* index) const {
return Object::ToUint32(index);
}
bool Object::ToArrayIndex(uint32_t* index) const {
return Object::ToUint32(index) && *index != kMaxUInt32;
}
bool Object::GetHeapObjectIfStrong(HeapObject* result) const {
return GetHeapObject(result);
}
bool Object::GetHeapObject(HeapObject* result) const {
if (!IsHeapObject()) return false;
*result = HeapObject::cast(*this);
return true;
}
HeapObject Object::GetHeapObject() const {
DCHECK(IsHeapObject());
return HeapObject::cast(*this);
}
void Object::VerifyApiCallResultType() {
#if DEBUG
if (IsSmi()) return;
DCHECK(IsHeapObject());
if (!(IsString() || IsSymbol() || IsJSReceiver() || IsHeapNumber() ||
IsBigInt() || IsUndefined() || IsTrue() || IsFalse() || IsNull())) {
FATAL("API call returned invalid object");
}
#endif // DEBUG
}
int RegExpMatchInfo::NumberOfCaptureRegisters() {
DCHECK_GE(length(), kLastMatchOverhead);
Object obj = get(kNumberOfCapturesIndex);
return Smi::ToInt(obj);
}
void RegExpMatchInfo::SetNumberOfCaptureRegisters(int value) {
DCHECK_GE(length(), kLastMatchOverhead);
set(kNumberOfCapturesIndex, Smi::FromInt(value));
}
String RegExpMatchInfo::LastSubject() {
DCHECK_GE(length(), kLastMatchOverhead);
return String::cast(get(kLastSubjectIndex));
}
void RegExpMatchInfo::SetLastSubject(String value) {
DCHECK_GE(length(), kLastMatchOverhead);
set(kLastSubjectIndex, value);
}
Object RegExpMatchInfo::LastInput() {
DCHECK_GE(length(), kLastMatchOverhead);
return get(kLastInputIndex);
}
void RegExpMatchInfo::SetLastInput(Object value) {
DCHECK_GE(length(), kLastMatchOverhead);
set(kLastInputIndex, value);
}
int RegExpMatchInfo::Capture(int i) {
DCHECK_LT(i, NumberOfCaptureRegisters());
Object obj = get(kFirstCaptureIndex + i);
return Smi::ToInt(obj);
}
void RegExpMatchInfo::SetCapture(int i, int value) {
DCHECK_LT(i, NumberOfCaptureRegisters());
set(kFirstCaptureIndex + i, Smi::FromInt(value));
}
WriteBarrierMode HeapObject::GetWriteBarrierMode(
const DisallowHeapAllocation& promise) {
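  // Rationale for the checks below: while incremental marking is active the
  // barrier is always needed to preserve the marking invariant; otherwise a
  // write into an object that itself lives in new space needs no
  // remembered-set entry, so the barrier can be skipped.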
Heap* heap = Heap::FromWritableHeapObject(this);
if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
if (Heap::InNewSpace(*this)) return SKIP_WRITE_BARRIER;
return UPDATE_WRITE_BARRIER;
}
AllocationAlignment HeapObject::RequiredAlignment(Map map) {
#ifdef V8_HOST_ARCH_32_BIT
int instance_type = map->instance_type();
if (instance_type == FIXED_FLOAT64_ARRAY_TYPE ||
instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
return kDoubleAligned;
}
if (instance_type == HEAP_NUMBER_TYPE) return kDoubleUnaligned;
#endif // V8_HOST_ARCH_32_BIT
return kWordAligned;
}
bool HeapObject::NeedsRehashing() const {
switch (map()->instance_type()) {
case DESCRIPTOR_ARRAY_TYPE:
return DescriptorArray::cast(*this)->number_of_descriptors() > 1;
case TRANSITION_ARRAY_TYPE:
return TransitionArray::cast(*this)->number_of_entries() > 1;
case ORDERED_HASH_MAP_TYPE:
return OrderedHashMap::cast(*this)->NumberOfElements() > 0;
case ORDERED_HASH_SET_TYPE:
return OrderedHashSet::cast(*this)->NumberOfElements() > 0;
case NAME_DICTIONARY_TYPE:
case GLOBAL_DICTIONARY_TYPE:
case NUMBER_DICTIONARY_TYPE:
case SIMPLE_NUMBER_DICTIONARY_TYPE:
case STRING_TABLE_TYPE:
case HASH_TABLE_TYPE:
case SMALL_ORDERED_HASH_MAP_TYPE:
case SMALL_ORDERED_HASH_SET_TYPE:
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
return true;
default:
return false;
}
}
Address HeapObject::GetFieldAddress(int field_offset) const {
return FIELD_ADDR(this, field_offset);
}
ACCESSORS2(EnumCache, keys, FixedArray, kKeysOffset)
ACCESSORS2(EnumCache, indices, FixedArray, kIndicesOffset)
DEFINE_DEOPT_ELEMENT_ACCESSORS2(TranslationByteArray, ByteArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS2(InlinedFunctionCount, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS2(LiteralArray, FixedArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS2(OsrBytecodeOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS2(OsrPcOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS2(OptimizationId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS2(InliningPositions, PodArray<InliningPosition>)
DEFINE_DEOPT_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
int HeapObject::SizeFromMap(Map map) const {
int instance_size = map->instance_size();
if (instance_size != kVariableSizeSentinel) return instance_size;
// Only inline the most frequent cases.
InstanceType instance_type = map->instance_type();
if (IsInRange(instance_type, FIRST_FIXED_ARRAY_TYPE, LAST_FIXED_ARRAY_TYPE)) {
return FixedArray::SizeFor(
FixedArray::unchecked_cast(*this)->synchronized_length());
}
if (IsInRange(instance_type, FIRST_CONTEXT_TYPE, LAST_CONTEXT_TYPE)) {
// Native context has fixed size.
DCHECK_NE(instance_type, NATIVE_CONTEXT_TYPE);
return Context::SizeFor(Context::unchecked_cast(*this)->length());
}
if (instance_type == ONE_BYTE_STRING_TYPE ||
instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to read their
    // length via synchronized_length().
return SeqOneByteString::SizeFor(
SeqOneByteString::unchecked_cast(*this)->synchronized_length());
}
if (instance_type == BYTE_ARRAY_TYPE) {
return ByteArray::SizeFor(
ByteArray::unchecked_cast(*this)->synchronized_length());
}
if (instance_type == BYTECODE_ARRAY_TYPE) {
return BytecodeArray::SizeFor(
BytecodeArray::unchecked_cast(*this)->synchronized_length());
}
if (instance_type == FREE_SPACE_TYPE) {
return FreeSpace::unchecked_cast(*this)->relaxed_read_size();
}
if (instance_type == STRING_TYPE ||
instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to read their
    // length via synchronized_length().
return SeqTwoByteString::SizeFor(
SeqTwoByteString::unchecked_cast(*this)->synchronized_length());
}
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
return FixedDoubleArray::SizeFor(
FixedDoubleArray::unchecked_cast(*this)->synchronized_length());
}
if (instance_type == FEEDBACK_METADATA_TYPE) {
return FeedbackMetadata::SizeFor(
FeedbackMetadata::unchecked_cast(*this)->synchronized_slot_count());
}
if (instance_type == DESCRIPTOR_ARRAY_TYPE) {
return DescriptorArray::SizeFor(
DescriptorArray::unchecked_cast(*this)->number_of_all_descriptors());
}
if (IsInRange(instance_type, FIRST_WEAK_FIXED_ARRAY_TYPE,
LAST_WEAK_FIXED_ARRAY_TYPE)) {
return WeakFixedArray::SizeFor(
WeakFixedArray::unchecked_cast(*this)->synchronized_length());
}
if (instance_type == WEAK_ARRAY_LIST_TYPE) {
return WeakArrayList::SizeForCapacity(
WeakArrayList::unchecked_cast(*this)->synchronized_capacity());
}
if (IsInRange(instance_type, FIRST_FIXED_TYPED_ARRAY_TYPE,
LAST_FIXED_TYPED_ARRAY_TYPE)) {
return FixedTypedArrayBase::unchecked_cast(*this)->TypedArraySize(
instance_type);
}
if (instance_type == SMALL_ORDERED_HASH_SET_TYPE) {
return SmallOrderedHashSet::SizeFor(
SmallOrderedHashSet::unchecked_cast(*this)->Capacity());
}
if (instance_type == SMALL_ORDERED_HASH_MAP_TYPE) {
return SmallOrderedHashMap::SizeFor(
SmallOrderedHashMap::unchecked_cast(*this)->Capacity());
}
if (instance_type == SMALL_ORDERED_NAME_DICTIONARY_TYPE) {
return SmallOrderedNameDictionary::SizeFor(
SmallOrderedNameDictionary::unchecked_cast(*this)->Capacity());
}
if (instance_type == PROPERTY_ARRAY_TYPE) {
return PropertyArray::SizeFor(
PropertyArray::cast(*this)->synchronized_length());
}
if (instance_type == FEEDBACK_VECTOR_TYPE) {
return FeedbackVector::SizeFor(
FeedbackVector::unchecked_cast(*this)->length());
}
if (instance_type == BIGINT_TYPE) {
return BigInt::SizeFor(BigInt::unchecked_cast(*this)->length());
}
if (instance_type == PRE_PARSED_SCOPE_DATA_TYPE) {
return PreParsedScopeData::SizeFor(
PreParsedScopeData::unchecked_cast(*this)->length());
}
if (instance_type == CODE_TYPE) {
return Code::unchecked_cast(*this)->CodeSize();
}
DCHECK_EQ(instance_type, EMBEDDER_DATA_ARRAY_TYPE);
return EmbedderDataArray::SizeFor(
EmbedderDataArray::unchecked_cast(*this)->length());
}
ACCESSORS2(TemplateObjectDescription, raw_strings, FixedArray,
kRawStringsOffset)
ACCESSORS2(TemplateObjectDescription, cooked_strings, FixedArray,
kCookedStringsOffset)
// static
Maybe<bool> Object::GreaterThan(Isolate* isolate, Handle<Object> x,
Handle<Object> y) {
Maybe<ComparisonResult> result = Compare(isolate, x, y);
if (result.IsJust()) {
switch (result.FromJust()) {
case ComparisonResult::kGreaterThan:
return Just(true);
case ComparisonResult::kLessThan:
case ComparisonResult::kEqual:
case ComparisonResult::kUndefined:
return Just(false);
}
}
return Nothing<bool>();
}
// static
Maybe<bool> Object::GreaterThanOrEqual(Isolate* isolate, Handle<Object> x,
Handle<Object> y) {
Maybe<ComparisonResult> result = Compare(isolate, x, y);
if (result.IsJust()) {
switch (result.FromJust()) {
case ComparisonResult::kEqual:
case ComparisonResult::kGreaterThan:
return Just(true);
case ComparisonResult::kLessThan:
case ComparisonResult::kUndefined:
return Just(false);
}
}
return Nothing<bool>();
}
// static
Maybe<bool> Object::LessThan(Isolate* isolate, Handle<Object> x,
Handle<Object> y) {
Maybe<ComparisonResult> result = Compare(isolate, x, y);
if (result.IsJust()) {
switch (result.FromJust()) {
case ComparisonResult::kLessThan:
return Just(true);
case ComparisonResult::kEqual:
case ComparisonResult::kGreaterThan:
case ComparisonResult::kUndefined:
return Just(false);
}
}
return Nothing<bool>();
}
// static
Maybe<bool> Object::LessThanOrEqual(Isolate* isolate, Handle<Object> x,
Handle<Object> y) {
Maybe<ComparisonResult> result = Compare(isolate, x, y);
if (result.IsJust()) {
switch (result.FromJust()) {
case ComparisonResult::kEqual:
case ComparisonResult::kLessThan:
return Just(true);
case ComparisonResult::kGreaterThan:
case ComparisonResult::kUndefined:
return Just(false);
}
}
return Nothing<bool>();
}
MaybeHandle<Object> Object::GetPropertyOrElement(Isolate* isolate,
Handle<Object> object,
Handle<Name> name) {
LookupIterator it = LookupIterator::PropertyOrElement(isolate, object, name);
return GetProperty(&it);
}
MaybeHandle<Object> Object::SetPropertyOrElement(Isolate* isolate,
Handle<Object> object,
Handle<Name> name,
Handle<Object> value,
LanguageMode language_mode,
StoreOrigin store_origin) {
LookupIterator it = LookupIterator::PropertyOrElement(isolate, object, name);
MAYBE_RETURN_NULL(SetProperty(&it, value, language_mode, store_origin));
return value;
}
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
Handle<Name> name,
Handle<JSReceiver> holder) {
LookupIterator it = LookupIterator::PropertyOrElement(holder->GetIsolate(),
receiver, name, holder);
return GetProperty(&it);
}
// static
Object Object::GetSimpleHash(Object object) {
DisallowHeapAllocation no_gc;
if (object->IsSmi()) {
uint32_t hash = ComputeUnseededHash(Smi::ToInt(object));
return Smi::FromInt(hash & Smi::kMaxValue);
}
if (object->IsHeapNumber()) {
double num = HeapNumber::cast(object)->value();
if (std::isnan(num)) return Smi::FromInt(Smi::kMaxValue);
// Use ComputeUnseededHash for all values in Signed32 range, including -0,
// which is considered equal to 0 because collections use SameValueZero.
uint32_t hash;
// Check range before conversion to avoid undefined behavior.
if (num >= kMinInt && num <= kMaxInt && FastI2D(FastD2I(num)) == num) {
hash = ComputeUnseededHash(FastD2I(num));
} else {
hash = ComputeLongHash(double_to_uint64(num));
}
return Smi::FromInt(hash & Smi::kMaxValue);
}
if (object->IsName()) {
uint32_t hash = Name::cast(object)->Hash();
return Smi::FromInt(hash);
}
if (object->IsOddball()) {
uint32_t hash = Oddball::cast(object)->to_string()->Hash();
return Smi::FromInt(hash);
}
if (object->IsBigInt()) {
uint32_t hash = BigInt::cast(object)->Hash();
return Smi::FromInt(hash & Smi::kMaxValue);
}
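  // For JSReceivers there is no simple hash; the object itself is returned as
  // a sentinel, and callers such as GetHash() below fall back to the
  // receiver's identity hash.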
DCHECK(object->IsJSReceiver());
return object;
}
Object Object::GetHash() {
DisallowHeapAllocation no_gc;
Object hash = GetSimpleHash(*this);
if (hash->IsSmi()) return hash;
DCHECK(IsJSReceiver());
JSReceiver receiver = JSReceiver::cast(*this);
return receiver->GetIdentityHash();
}
Handle<Object> ObjectHashTableShape::AsHandle(Handle<Object> key) {
return key;
}
Relocatable::Relocatable(Isolate* isolate) {
isolate_ = isolate;
prev_ = isolate->relocatable_top();
isolate->set_relocatable_top(this);
}
Relocatable::~Relocatable() {
DCHECK_EQ(isolate_->relocatable_top(), this);
isolate_->set_relocatable_top(prev_);
}
// Predictably converts a HeapObject or Address to a uint32 by calculating the
// offset of the address within its MemoryChunk.
static inline uint32_t ObjectAddressForHashing(Address object) {
uint32_t value = static_cast<uint32_t>(object);
return value & MemoryChunk::kAlignmentMask;
}
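// For instance, two objects that happen to sit at the same offset within
// their (differently located) MemoryChunks hash to the same value, which is
// presumably what makes the result predictable across address space layouts.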
static inline Handle<Object> MakeEntryPair(Isolate* isolate, uint32_t index,
Handle<Object> value) {
Handle<Object> key = isolate->factory()->Uint32ToString(index);
Handle<FixedArray> entry_storage =
isolate->factory()->NewUninitializedFixedArray(2);
{
entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
}
return isolate->factory()->NewJSArrayWithElements(entry_storage,
PACKED_ELEMENTS, 2);
}
static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Object> key,
Handle<Object> value) {
Handle<FixedArray> entry_storage =
isolate->factory()->NewUninitializedFixedArray(2);
{
entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
}
return isolate->factory()->NewJSArrayWithElements(entry_storage,
PACKED_ELEMENTS, 2);
}
bool ScopeInfo::IsAsmModule() const {
return IsAsmModuleField::decode(Flags());
}
bool ScopeInfo::HasSimpleParameters() const {
return HasSimpleParametersField::decode(Flags());
}
#define FIELD_ACCESSORS(name) \
void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
int ScopeInfo::name() const { \
if (length() > 0) { \
return Smi::ToInt(get(k##name)); \
} else { \
return 0; \
} \
}
FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(FIELD_ACCESSORS)
#undef FIELD_ACCESSORS
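// For a field such as Flags (used by the accessors above) this generates,
// roughly:
//   void ScopeInfo::SetFlags(int value) { set(kFlags, Smi::FromInt(value)); }
//   int ScopeInfo::Flags() const {
//     return length() > 0 ? Smi::ToInt(get(kFlags)) : 0;
//   }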
FreshlyAllocatedBigInt FreshlyAllocatedBigInt::cast(Object object) {
SLOW_DCHECK(object->IsBigInt());
return FreshlyAllocatedBigInt(object->ptr());
}
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_INL_H_