SkPicture
127 SkPicture::~SkPicture() { 128 SkSafeUnref(fRecord); 129 SkDELETE(fPlayback); 130 }91 #define SkDELETE(obj) delete obj
(See footnotes #04 and #03.)
222 SkPicturePlayback::~SkPicturePlayback() { 223 sk_free((void*) fReader.base()); 224 225 SkDELETE_ARRAY(fBitmaps); 226 SkDELETE_ARRAY(fMatrices); 227 SkDELETE_ARRAY(fPaints); 228 SkDELETE_ARRAY(fRegions); 229 230 SkSafeUnref(fPathHeap); 231 232 for (int i = 0; i < fPictureCount; i++) { 233 fPictureRefs[i]->unref(); 234 } 235 SkDELETE_ARRAY(fPictureRefs); 236 237 SkDELETE(fFactoryPlayback); 238 }
(See footnote #02.)
227 SkRefCntPlayback::~SkRefCntPlayback() { 228 this->reset(NULL); 229 }
(See footnote #01.)
231 void SkRefCntPlayback::reset(const SkRefCntSet* rec) { 232 for (int i = 0; i < fCount; i++) { 233 SkASSERT(fArray[i]); 234 fArray[i]->unref(); 235 } 236 SkDELETE_ARRAY(fArray); 237 238 if (rec) { 239 fCount = rec->count(); 240 fArray = SkNEW_ARRAY(SkRefCnt*, fCount); 241 rec->copyToArray(fArray); 242 for (int i = 0; i < fCount; i++) { 243 fArray[i]->ref(); 244 } 245 } else { 246 fCount = 0; 247 fArray = NULL; 248 } 249 }56 void unref() const { 57 SkASSERT(fRefCnt > 0); 58 if (sk_atomic_dec(&fRefCnt) == 1) { 59 fRefCnt = 1; // so our destructor won't complain 60 SkDELETE(this); 61 } 62 }
(See footnote #00.)
// Skia's atomic decrement maps directly onto Android's atomic helper
// (android_atomic_dec returns the value *before* the decrement — see below).
38 #define sk_atomic_dec(addr) android_atomic_dec(addr)
187 extern inline int32_t android_atomic_dec(volatile int32_t *addr) 188 { 189 return android_atomic_add(-1, addr); 190 }
// Three alternative implementations of android_atomic_add, selected by the
// target ARM flavor: (1) Thumb-1 builds get only an out-of-line declaration
// (inline asm below is ARM-mode); (2) ARMv6+ uses an LDREX/STREX
// load-linked/store-conditional retry loop, returning the previous value;
// (3) otherwise a compare-and-swap retry loop via android_atomic_cas.
// The quoted span also carries the __ARM_HAVE_LDREX_STREX gate
// (defined for __ARM_ARCH__ >= 6, replacing the deprecated SWP instruction),
// an ARM spin-lock example using LDREX/STREX, and the cited page title.
// NOTE(review): "strex %2, %1, [%4]" writes 0 to 'status' on success,
// non-zero on failure, hence the __builtin_expect(status != 0, 0) retry.
148 #if defined(__thumb__) 149 extern int32_t android_atomic_add(int32_t increment, 150 volatile int32_t *ptr); 151 #elif defined(__ARM_HAVE_LDREX_STREX) 152 extern inline int32_t android_atomic_add(int32_t increment, 153 volatile int32_t *ptr) 154 { 155 int32_t prev, tmp, status; 156 android_memory_barrier(); 157 do { 158 __asm__ __volatile__ ("ldrex %0, [%4]\n" 159 "add %1, %0, %5\n" 160 "strex %2, %1, [%4]" 161 : "=&r" (prev), "=&r" (tmp), 162 "=&r" (status), "+m" (*ptr) 163 : "r" (ptr), "Ir" (increment) 164 : "cc"); 165 } while (__builtin_expect(status != 0, 0)); 166 return prev; 167 } 168 #else 169 extern inline int32_t android_atomic_add(int32_t increment, 170 volatile int32_t *ptr) 171 { 172 int32_t prev, status; 173 android_memory_barrier(); 174 do { 175 prev = *ptr; 176 status = android_atomic_cas(prev, prev + increment, ptr); 177 } while (__builtin_expect(status != 0, 0)); 178 return prev; 179 } 180 #endif152 /* define __ARM_HAVE_LDREX_STREX for ARMv6 and ARMv7 architecture to be 153 * used in replacement of deprecated swp instruction 154 */ 155 #if __ARM_ARCH__ >= 6 156 # define __ARM_HAVE_LDREX_STREX 157 #endif例 MOV r1, #0x1 ; load the ‘lock taken’ value try LDREX r0, [LockAddr] ; load the lock value CMP r0, #0 ; is the lock free? STREXEQ r0, r1, [LockAddr] ; try and claim the lock CMPEQ r0, #0 ; did this succeed? BNE try ; no – try again .... ; yes – we have the lock__asm__ を試してみた - memologue
c - Working of __asm__ __volatile__ ("" : : : "memory") - Stack Overflow
GCCのインラインアセンブラの書き方 for x86 - OSのようなもの