static ALWAYS_INLINE id
callAlloc(Class cls, bool checkNil/*false*/, bool allocWithZone=false/*true*/)
{
    if (slowpath(checkNil && !cls)) return nil;

#if __OBJC2__
    // hasCustomAWZ() reports whether this class has a custom
    // alloc/allocWithZone implementation.
    if (fastpath(!cls->ISA()->hasCustomAWZ())) {
        // No custom alloc/allocWithZone implementation.
        // Go straight to the allocator.
        // fixme store hasCustomAWZ in the non-meta class and
        // add it to canAllocFast's summary
        if (fastpath(cls->canAllocFast())) {
            // No ctors, raw isa, etc. Go straight to the metal.
            bool dtor = cls->hasCxxDtor();
            id obj = (id)calloc(1, cls->bits.fastInstanceSize());
            if (slowpath(!obj)) return callBadAllocHandler(cls);
            obj->initInstanceIsa(cls, dtor);
            return obj;
        }
        else {
            // Has ctor or raw isa or something. Use the slower path.
            id obj = class_createInstance(cls, 0);
            if (slowpath(!obj)) return callBadAllocHandler(cls);
            return obj;
        }
    }
#endif

    // No shortcuts available.
    if (allocWithZone) return [cls allocWithZone:nil];
    return [cls alloc];
}
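One recurring detail before moving on: fastpath()/slowpath() are branch-prediction hints, defined in objc4's objc-os.h essentially as below, so slowpath(checkNil && !cls) reads as "this branch is expected to be false":

// Hints to the compiler (and CPU) about which way a branch usually goes.
#define fastpath(x) (__builtin_expect(bool(x), 1))
#define slowpath(x) (__builtin_expect(bool(x), 0))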
ALWAYS_INLINE id
objc_object::rootRetain(bool tryRetain, bool handleOverflow)
{
    if (isTaggedPointer()) return (id)this;

    bool sideTableLocked = false;
    bool transcribeToSideTable = false;

    isa_t oldisa;
    isa_t newisa;

    do {
        transcribeToSideTable = false;
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (slowpath(!newisa.nonpointer)) {
            ClearExclusive(&isa.bits);
            if (!tryRetain && sideTableLocked) sidetable_unlock();
            if (tryRetain) return sidetable_tryRetain() ? (id)this : nil;
            else return sidetable_retain();
        }
        // don't check newisa.fast_rr; we already called any RR overrides
        if (slowpath(tryRetain && newisa.deallocating)) {
            ClearExclusive(&isa.bits);
            if (!tryRetain && sideTableLocked) sidetable_unlock();
            return nil;
        }
        uintptr_t carry;
        newisa.bits = addc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc++

        if (slowpath(carry)) {
            // newisa.extra_rc++ overflowed
            if (!handleOverflow) {
                ClearExclusive(&isa.bits);
                return rootRetain_overflow(tryRetain);
            }
            // Leave half of the retain counts inline and
            // prepare to copy the other half to the side table.
            if (!tryRetain && !sideTableLocked) sidetable_lock();
            sideTableLocked = true;
            transcribeToSideTable = true;
            newisa.extra_rc = RC_HALF;
            newisa.has_sidetable_rc = true;
        }
    } while (slowpath(!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)));

    if (slowpath(transcribeToSideTable)) {
        // Copy the other half of the retain counts to the side table.
        sidetable_addExtraRC_nolock(RC_HALF);
    }

    if (slowpath(!tryRetain && sideTableLocked)) sidetable_unlock();
    return (id)this;
}
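To make the overflow handoff concrete, here is a minimal standalone model of the inline/side-table split. The ModelObject type and its 8-bit inline field are illustrative only (x86_64 uses an 8-bit extra_rc, arm64 a 19-bit one), not the real isa_t layout, and there are no atomics:

#include <cstdint>
#include <cstdio>

struct ModelObject {
    uint8_t  extra_rc = 0;         // inline count of retains beyond the implicit +1
    bool     has_sidetable_rc = false;
    uint64_t side_rc = 0;          // stand-in for the side table refcount

    void retain() {
        uint8_t before = extra_rc++;
        if (before == UINT8_MAX) {         // "carry": the inline field overflowed
            extra_rc = UINT8_MAX / 2 + 1;  // keep RC_HALF (128 here) inline
            has_sidetable_rc = true;
            side_rc += UINT8_MAX / 2 + 1;  // transcribe RC_HALF to the side table
        }
    }
};

int main() {
    ModelObject o;
    for (int i = 0; i < 300; i++) o.retain();
    // 300 retains: the 256th overflows; 128 stay inline, 128 move aside,
    // and the remaining 44 land inline again.
    printf("inline=%u side=%llu total=%llu\n",
           (unsigned)o.extra_rc,
           (unsigned long long)o.side_rc,
           (unsigned long long)(o.extra_rc + o.side_rc));
    return 0;
}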
ALWAYS_INLINE bool
objc_object::rootRelease(bool performDealloc, bool handleUnderflow)
{
    if (isTaggedPointer()) return false;

    bool sideTableLocked = false;

    isa_t oldisa;
    isa_t newisa;

 retry:
    do {
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (slowpath(!newisa.nonpointer)) {
            ClearExclusive(&isa.bits);
            if (sideTableLocked) sidetable_unlock();
            return sidetable_release(performDealloc);
        }
        // don't check newisa.fast_rr; we already called any RR overrides
        uintptr_t carry;
        newisa.bits = subc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc--
        if (slowpath(carry)) {
            // don't ClearExclusive()
            goto underflow;
        }
    } while (slowpath(!StoreReleaseExclusive(&isa.bits, oldisa.bits, newisa.bits)));

    if (slowpath(sideTableLocked)) sidetable_unlock();
    return false;

 underflow:
    // newisa.extra_rc-- underflowed: borrow from side table or deallocate

    // abandon newisa to undo the decrement
    newisa = oldisa;

    if (slowpath(newisa.has_sidetable_rc)) {
        if (!handleUnderflow) {
            ClearExclusive(&isa.bits);
            return rootRelease_underflow(performDealloc);
        }

        // Transfer retain count from side table to inline storage.

        if (!sideTableLocked) {
            ClearExclusive(&isa.bits);
            sidetable_lock();
            sideTableLocked = true;
            // Need to start over to avoid a race against
            // the nonpointer -> raw pointer transition.
            goto retry;
        }

        // Try to remove some retain counts from the side table.
        size_t borrowed = sidetable_subExtraRC_nolock(RC_HALF);

        // To avoid races, has_sidetable_rc must remain set
        // even if the side table count is now zero.

        if (borrowed > 0) {
            // Side table retain count decreased.
            // Try to add them to the inline count.
            newisa.extra_rc = borrowed - 1;  // redo the original decrement too
            bool stored = StoreReleaseExclusive(&isa.bits, oldisa.bits, newisa.bits);
            if (!stored) {
                // Inline update failed.
                // Try it again right now. This prevents livelock on LL/SC
                // architectures where the side table access itself may have
                // dropped the reservation.
                isa_t oldisa2 = LoadExclusive(&isa.bits);
                isa_t newisa2 = oldisa2;
                if (newisa2.nonpointer) {
                    uintptr_t overflow;
                    newisa2.bits =
                        addc(newisa2.bits, RC_ONE * (borrowed-1), 0, &overflow);
                    if (!overflow) {
                        stored = StoreReleaseExclusive(&isa.bits, oldisa2.bits,
                                                       newisa2.bits);
                    }
                }
            }

            if (!stored) {
                // Inline update failed.
                // Put the retains back in the side table.
                sidetable_addExtraRC_nolock(borrowed);
                goto retry;
            }

            // Decrement successful after borrowing from side table.
            // This decrement cannot be the deallocating decrement - the side
            // table lock and has_sidetable_rc bit ensure that if everyone
            // else tried to -release while we worked, the last one would block.
            sidetable_unlock();
            return false;
        }
        else {
            // Side table is empty after all. Fall-through to the dealloc path.
        }
    }

    // Really deallocate.

    if (slowpath(newisa.deallocating)) {
        ClearExclusive(&isa.bits);
        if (sideTableLocked) sidetable_unlock();
        return overrelease_error();
        // does not actually return
    }
    newisa.deallocating = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
    if (slowpath(sideTableLocked)) sidetable_unlock();

    __sync_synchronize();
    if (performDealloc) {
        ((void(*)(objc_object *, SEL))objc_msgSend)(this, SEL_dealloc);
    }
    return true;
}
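The underflow borrow mirrors the overflow handoff in reverse. Extending the ModelObject sketch from above (same caveats: illustrative field widths, no atomics, no retry loop):

#include <algorithm>

// Release side of the ModelObject sketch. Returns true when the count
// falls below the implicit +1, i.e. the object should deallocate.
bool release(ModelObject &o) {
    if (o.extra_rc > 0) {                     // fast path: extra_rc--
        o.extra_rc--;
        return false;
    }
    if (o.has_sidetable_rc && o.side_rc > 0) {
        // Borrow up to RC_HALF (128 here) back from the side table.
        uint64_t borrowed = std::min<uint64_t>(o.side_rc, 128);
        o.side_rc -= borrowed;
        o.extra_rc = (uint8_t)(borrowed - 1); // redo the original decrement too
        return false;
    }
    return true;                              // really deallocate
}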
/***********************************************************************
* objc_destructInstance
* Destroys an instance without freeing memory.
* Calls C++ destructors.
* Calls ARC ivar cleanup.
* Removes associative references.
* Returns `obj`. Does nothing if `obj` is nil.
**********************************************************************/
void *objc_destructInstance(id obj)
{
    if (obj) {
        // Read all of the flags at once for performance.
        bool cxx = obj->hasCxxDtor();
        bool assoc = obj->hasAssociatedObjects();

        // This order is important.
        if (cxx) object_cxxDestruct(obj);
        if (assoc) _object_remove_assocations(obj);
        obj->clearDeallocating();
    }

    return obj;
}
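For context, objc_destructInstance is called from object_dispose(), which frees the memory afterwards; in the same source it is essentially:

id object_dispose(id obj)
{
    if (!obj) return nil;

    objc_destructInstance(obj);
    free(obj);

    return nil;
}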
void _object_remove_assocations(id object) {
    vector< ObjcAssociation, ObjcAllocator<ObjcAssociation> > elements;
    {
        AssociationsManager manager;
        AssociationsHashMap &associations(manager.associations());
        if (associations.size() == 0) return;
        disguised_ptr_t disguised_object = DISGUISE(object);
        AssociationsHashMap::iterator i = associations.find(disguised_object);
        if (i != associations.end()) {
            // copy all of the associations that need to be removed.
            ObjectAssociationMap *refs = i->second;
            for (ObjectAssociationMap::iterator j = refs->begin(), end = refs->end();
                 j != end; ++j) {
                elements.push_back(j->second);
            }
            // remove the secondary table.
            delete refs;
            associations.erase(i);
        }
    }
    // the calls to releaseValue() happen outside of the lock.
    for_each(elements.begin(), elements.end(), ReleaseValue());
}
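These associations are the ones created through objc_setAssociatedObject. A small ARC example (the key and variable names are mine) whose value is released by exactly this path when the owner deallocates:

#import <Foundation/Foundation.h>
#import <objc/runtime.h>

static const void *kNoteKey = &kNoteKey;  // any unique address works as a key

int main() {
    @autoreleasepool {
        id owner = [NSObject new];
        objc_setAssociatedObject(owner, kNoteKey, @"a note",
                                 OBJC_ASSOCIATION_RETAIN_NONATOMIC);
        NSLog(@"%@", objc_getAssociatedObject(owner, kNoteKey)); // "a note"
        owner = nil;  // dealloc -> objc_destructInstance ->
                      // _object_remove_assocations releases the associated value
    }
    return 0;
}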
Clearing the remaining flag bits
inline void objc_object::clearDeallocating() {
    if (slowpath(!isa.nonpointer)) {
        // Slow path for raw pointer isa:
        // the object's isa is an ordinary class pointer.
        sidetable_clearDeallocating();
    }
    else if (slowpath(isa.weakly_referenced || isa.has_sidetable_rc)) {
        // Slow path for non-pointer isa with weak refs and/or side table data.
        clearDeallocating_slow();
    }

    assert(!sidetable_present());
}
clearDeallocating cleans up the remaining flag bits, split into two cases by whether the isa is non-pointer. Non-pointer isa objects have to be handled separately from ordinary ones because their weakly-referenced flag and their uses-side-table flag live in the "pointer" itself. The flow is the same in both cases, though: check whether any weak references point at the object and, if so, set them to nil; then erase the reference count from the side table; and finally remove the object's entries from the weak table.
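For reference, the non-pointer slow path reads roughly like this in the same source:

NEVER_INLINE void objc_object::clearDeallocating_slow()
{
    assert(isa.nonpointer && (isa.weakly_referenced || isa.has_sidetable_rc));

    SideTable& table = SideTables()[this];
    table.lock();
    if (isa.weakly_referenced) {
        // nil out every __weak variable that still points here
        weak_clear_no_lock(&table.weak_table, (id)this);
    }
    if (isa.has_sidetable_rc) {
        // drop the overflow retain count stored in the side table
        table.refcnts.erase(this);
    }
    table.unlock();
}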
The weak-reference bookkeeping itself happens in storeWeak (NSObject.mm), the backend of objc_storeWeak; the relevant fragment:

    Class previouslyInitializedClass = nil;
    id oldObj;
    SideTable *oldTable;
    SideTable *newTable;

    //..........

    // Clean up old value, if any.
    if (haveOld) {
        // The __weak variable already pointed at some object;
        // break the association between that object and this variable.
        weak_unregister_no_lock(&oldTable->weak_table, oldObj, location);
    }

    // Assign new value, if any.
    if (haveNew) {
        // Associate the new object with the __weak variable.
        newObj = (objc_object *)
            weak_register_no_lock(&newTable->weak_table, (id)newObj, location,
                                  crashIfDeallocating);
        // weak_register_no_lock returns nil if weak store should be rejected

        // Set is-weakly-referenced bit in refcount table.
        if (newObj && !newObj->isTaggedPointer()) {
            // Set the isa's weakly_referenced bit (or SIDE_TABLE_WEAKLY_REFERENCED
            // in the side table) to mark that a __weak variable points at this
            // object; dealloc uses it to speed up nil-ing the references out.
            newObj->setWeaklyReferenced_nolock();
        }

        // Set the __weak variable to newObj.
        // Do not set *location anywhere else. That would introduce a race.
        *location = (id)newObj;
    }
    else {
        // No new value. The storage is not changed.
    }

    SideTable::unlockTwo<haveOld, haveNew>(oldTable, newTable);

    return (id)newObj;
}
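Every store to a __weak variable under ARC funnels into this routine via objc_storeWeak (or objc_initWeak for the first store). A small example of the resulting zeroing behavior (build with -fobjc-arc):

#import <Foundation/Foundation.h>

int main() {
    __weak id weakRef = nil;
    @autoreleasepool {
        id obj = [NSObject new];
        weakRef = obj;              // compiles to objc_storeWeak(&weakRef, obj)
        NSLog(@"%@", weakRef);      // <NSObject: 0x...>
    }                               // obj's last strong reference dies here:
                                    // dealloc -> weak table sets weakRef to nil
    NSLog(@"%@", weakRef);          // (null)
    return 0;
}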
id weak_register_no_lock(weak_table_t *weak_table, id referent_id,
                         id *referrer_id, bool crashIfDeallocating)
{
    objc_object *referent = (objc_object *)referent_id;
    objc_object **referrer = (objc_object **)referrer_id;

    if (!referent || referent->isTaggedPointer()) return referent_id;

    // ensure that the referenced object is viable
    bool deallocating;
    if (!referent->ISA()->hasCustomRR()) {
        deallocating = referent->rootIsDeallocating();
    }
    else {
        BOOL (*allowsWeakReference)(objc_object *, SEL) =
            (BOOL(*)(objc_object *, SEL))
            object_getMethodImplementation((id)referent,
                                           SEL_allowsWeakReference);
        if ((IMP)allowsWeakReference == _objc_msgForward) {
            return nil;
        }
        deallocating =
            ! (*allowsWeakReference)(referent, SEL_allowsWeakReference);
    }

    if (deallocating) {
        if (crashIfDeallocating) {
            _objc_fatal("Cannot form weak reference to instance (%p) of "
                        "class %s. It is possible that this object was "
                        "over-released, or is in the process of deallocation.",
                        (void*)referent, object_getClassName((id)referent));
        } else {
            return nil;
        }
    }

    // now remember it and where it is being stored
    weak_entry_t *entry;
    if ((entry = weak_entry_for_referent(weak_table, referent))) {
        append_referrer(entry, referrer);
    }
    else {
        weak_entry_t new_entry(referent, referrer);
        weak_grow_maybe(weak_table);
        weak_entry_insert(weak_table, &new_entry);
    }
    // Do not set *referrer. objc_storeWeak() requires that the
    // value not change.

    return referent_id;
}
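That deallocating/crashIfDeallocating check is what produces the familiar runtime crash when a weak reference is formed to an object that is already running -dealloc. An illustration (the class name is mine; build with -fobjc-arc):

#import <Foundation/Foundation.h>

@interface Doomed : NSObject
@end

@implementation Doomed
- (void)dealloc {
    // rootIsDeallocating() is already true here, so weak_register_no_lock
    // takes the crashIfDeallocating branch:
    // "Cannot form weak reference to instance (0x...) of class Doomed."
    __weak id w = self;
    (void)w;
}
@end

int main() {
    @autoreleasepool {
        Doomed *d = [Doomed new];
        d = nil;  // -> dealloc -> _objc_fatal(...)
    }
    return 0;
}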