Index: src/core/SkMallocPixelRef.cpp
diff --git a/src/core/SkMallocPixelRef.cpp b/src/core/SkMallocPixelRef.cpp
index f229e9de34d0fa8dbdd29304dd0ac0f7a78e5611..4ba6e049d8ac05a112b53b3888007b0cb4555810 100644
--- a/src/core/SkMallocPixelRef.cpp
+++ b/src/core/SkMallocPixelRef.cpp
@@ -1,27 +1,105 @@
-
 /*
  * Copyright 2011 Google Inc.
  *
  * Use of this source code is governed by a BSD-style license that can be
  * found in the LICENSE file.
  */
+
 #include "SkMallocPixelRef.h"
 #include "SkBitmap.h"
 #include "SkFlattenableBuffers.h"
-SkMallocPixelRef::SkMallocPixelRef(void* storage, size_t size,
-                                   SkColorTable* ctable, bool ownPixels) {
-    if (NULL == storage) {
-        SkASSERT(ownPixels);
-        storage = sk_malloc_throw(size);
+static bool check_info(const SkImageInfo& info, SkColorTable* ctable) {
+    if (info.fWidth < 0 ||
+        info.fHeight < 0 ||
+        (unsigned)info.fColorType > (unsigned)kLastEnum_SkColorType ||
+        (unsigned)info.fAlphaType > (unsigned)kLastEnum_SkAlphaType)
+    {
+        return false;
+    }
+
+    // these seem like good checks, but currently we have (at least) tests
+    // that expect the pixelref to succeed even when there is a mismatch
+    // with colortables. fix?
+#if 0
+    if (kIndex8_SkColorType == info.fColorType && NULL == ctable) {
+        return false;
+    }
+    if (kIndex8_SkColorType != info.fColorType && NULL != ctable) {
+        return false;
+    }
+#endif
+    return true;
+}
+
+SkMallocPixelRef* SkMallocPixelRef::NewDirect(const SkImageInfo& info,
+                                              void* addr,
+                                              size_t rowBytes,
+                                              SkColorTable* ctable) {
+    if (!check_info(info, ctable)) {
+        return NULL;
+    }
+    return SkNEW_ARGS(SkMallocPixelRef, (info, addr, rowBytes, ctable, false));
+}
+
+SkMallocPixelRef* SkMallocPixelRef::NewAllocate(const SkImageInfo& info,
+                                                size_t requestedRowBytes,
+                                                SkColorTable* ctable) {
+    if (!check_info(info, ctable)) {
+        return NULL;
+    }
+
+    int32_t minRB = info.minRowBytes();
+    if (minRB < 0) {
+        return NULL;    // allocation will be too large
+    }
+    if (requestedRowBytes > 0 && (int32_t)requestedRowBytes < minRB) {
+        return NULL;    // cannot meet requested rowbytes
+    }
+
+    int32_t rowBytes;
+    if (requestedRowBytes) {
+        rowBytes = requestedRowBytes;
+    } else {
+        rowBytes = minRB;
     }
+
+    Sk64 bigSize;
+    bigSize.setMul(info.fHeight, rowBytes);
+    if (!bigSize.is32()) {
+        return NULL;
+    }
+
+    size_t size = bigSize.get32();
+    void* addr = sk_malloc_flags(size, 0);
+    if (NULL == addr) {
+        return NULL;
+    }
+
+    return SkNEW_ARGS(SkMallocPixelRef, (info, addr, rowBytes, ctable, true));
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+SkMallocPixelRef::SkMallocPixelRef(const SkImageInfo& info, void* storage,
+                                   size_t rowBytes, SkColorTable* ctable,
+                                   bool ownsPixels)
+    : SkPixelRef(info)
+    , fOwnPixels(ownsPixels)
+{
+    SkASSERT(check_info(info, ctable));
+    SkASSERT(rowBytes >= info.minRowBytes());
+
+    if (kIndex8_SkColorType != info.fColorType) {
+        ctable = NULL;
+    }
+
     fStorage = storage;
-    fSize = size;
     fCTable = ctable;
+    fRB = rowBytes;
     SkSafeRef(ctable);
-    fOwnPixels = ownPixels;
-
-    this->setPreLocked(fStorage, fCTable);
+
+    this->setPreLocked(fStorage, fRB, fCTable);
 }
 SkMallocPixelRef::~SkMallocPixelRef() {
@@ -31,19 +109,30 @@ SkMallocPixelRef::~SkMallocPixelRef() {
     }
 }
-void* SkMallocPixelRef::onLockPixels(SkColorTable** ct) {
-    *ct = fCTable;
-    return fStorage;
+bool SkMallocPixelRef::onNewLockPixels(LockRec* rec) {
+    rec->fPixels = fStorage;
+    rec->fRowBytes = fRB;
+    rec->fColorTable = fCTable;
+    return true;
 }
 void SkMallocPixelRef::onUnlockPixels() {
     // nothing to do
 }
+size_t SkMallocPixelRef::getAllocatedSizeInBytes() const {
+    return this->info().getSafeSize(fRB);
+}
+
 void SkMallocPixelRef::flatten(SkFlattenableWriteBuffer& buffer) const {
     this->INHERITED::flatten(buffer);
-    buffer.writeByteArray(fStorage, fSize);
+    buffer.write32(fRB);
+
+    // TODO: replace this bulk write with a chunky one that can trim off any
+    // trailing bytes on each scanline (in case rowbytes > width*size)
+    size_t size = this->info().getSafeSize(fRB);
+    buffer.writeByteArray(fStorage, size);
     buffer.writeBool(fCTable != NULL);
     if (fCTable) {
         fCTable->writeToBuffer(buffer);
@@ -51,16 +140,18 @@ void SkMallocPixelRef::flatten(SkFlattenableWriteBuffer& buffer) const {
 }
 SkMallocPixelRef::SkMallocPixelRef(SkFlattenableReadBuffer& buffer)
-        : INHERITED(buffer, NULL) {
-    fSize = buffer.getArrayCount();
-    fStorage = sk_malloc_throw(fSize);
-    buffer.readByteArray(fStorage, fSize);
+    : INHERITED(buffer, NULL)
+    , fOwnPixels(true)
+{
+    fRB = buffer.read32();
+    size_t size = this->info().getSafeSize(fRB);
+    fStorage = sk_malloc_throw(size);
+    buffer.readByteArray(fStorage, size);
     if (buffer.readBool()) {
         fCTable = SkNEW_ARGS(SkColorTable, (buffer));
     } else {
         fCTable = NULL;
     }
-    fOwnPixels = true;
-    this->setPreLocked(fStorage, fCTable);
+    this->setPreLocked(fStorage, fRB, fCTable);
 }
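
For readers skimming the patch, the split of responsibilities is: NewAllocate validates the SkImageInfo, resolves rowBytes (falling back to info.minRowBytes() when 0 is requested) and mallocs storage that the pixel ref owns, while NewDirect only wraps caller-owned memory and never frees it. The snippet below is a minimal usage sketch of that contract, not part of the patch; the variable names and the particular color/alpha type constants are illustrative assumptions, and only the SkMallocPixelRef and SkImageInfo calls come from the code above.

    // Hypothetical usage sketch (not part of the patch).
    SkImageInfo info;
    info.fWidth = 640;
    info.fHeight = 480;
    info.fColorType = kPMColor_SkColorType;   // assumed 32-bit premul config
    info.fAlphaType = kPremul_SkAlphaType;

    // Owned storage: the pixel ref mallocs the pixels itself.
    // Passing 0 for rowBytes means "use info.minRowBytes()".
    SkMallocPixelRef* owned = SkMallocPixelRef::NewAllocate(info, 0, NULL);

    // Wrapped storage: the caller allocates and keeps ownership of the pixels.
    void* pixels = sk_malloc_throw(info.minRowBytes() * info.fHeight);
    SkMallocPixelRef* wrapped = SkMallocPixelRef::NewDirect(info, pixels,
                                                            info.minRowBytes(),
                                                            NULL);

    // Both factories return NULL if check_info() rejects the SkImageInfo;
    // callers hold a ref on the returned objects and unref() them when done.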