1. The memory size of a Bitmap

Environment:
- Physical density of the test phone: 284
- Original image size: 1920*1200
- Resource folder holding the image: mipmap-xhdpi

Suppose that, in the environment above, we load the 1920*1200 image into memory. What will its actual width and height be? We can read the bitmap's width and height in code, and it turns out they have been scaled relative to the original image. To understand exactly how that happens, we have to start from the underlying implementation.

Measured result: bitmap width = 1704, height = 1065
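Before diving into the source, here is a minimal sketch of how the numbers above can be reproduced (it assumes the resource is named R.mipmap.hero, as in the decode call traced below): mipmap-xhdpi corresponds to 320 dpi (DisplayMetrics.DENSITY_XHIGH), the device reports 284, so the expected scale is 284 / 320 = 0.8875, which turns 1920*1200 into 1704*1065.

// Sketch (not from the original article), e.g. inside an Activity's onCreate():
// the image lives in mipmap-xhdpi (320 dpi) and the device reports densityDpi = 284.
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.hero);
int deviceDpi = getResources().getDisplayMetrics().densityDpi;   // 284 on this device
int resourceDpi = DisplayMetrics.DENSITY_XHIGH;                  // 320 for mipmap-xhdpi
float scale = (float) deviceDpi / resourceDpi;                   // 284 / 320 = 0.8875
Log.d("BitmapSize", "expected: " + (int) (1920 * scale + 0.5f)   // 1704
        + " x " + (int) (1200 * scale + 0.5f));                  // 1065
Log.d("BitmapSize", "actual:   " + bitmap.getWidth() + " x " + bitmap.getHeight());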
1. Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.hero);
2. public static Bitmap decodeResource(Resources res, int id) {
       return decodeResource(res, id, null);
   }
3. bm = decodeResourceStream(res, value, is, null, opts);
4. decodeStream(is, pad, opts)
5. private static Bitmap decodeStreamInternal(@NonNull InputStream is,
           @Nullable Rect outPadding, @Nullable Options opts) {
       // ASSERT(is != null);
       byte[] tempStorage = null;
       if (opts != null) tempStorage = opts.inTempStorage;
       if (tempStorage == null) tempStorage = new byte[DECODE_BUFFER_SIZE];
       // Call into the native decoder.
       return nativeDecodeStream(is, tempStorage, outPadding, opts,
               Options.nativeInBitmap(opts),
               Options.nativeColorSpace(opts));
   }
Tracing step by step from BitmapFactory.decodeResource(), we find that it ultimately calls the JNI method nativeDecodeStream, which returns the bitmap object.
static jobject nativeDecodeStream(JNIEnv* env, jobject clazz, jobject is, jbyteArray storage,
        jobject padding, jobject options) {
    jobject bitmap = NULL;
    std::unique_ptr<SkStream> stream(CreateJavaInputStreamAdaptor(env, is, storage));
    if (stream.get()) {
        std::unique_ptr<SkStreamRewindable> bufferedStream(
                SkFrontBufferedStream::Create(stream.release(), SkCodec::MinBufferedBytesNeeded()));
        SkASSERT(bufferedStream.get() != NULL);
        bitmap = doDecode(env, bufferedStream.release(), padding, options);
    }
    return bitmap;
}
static jobject doDecode(JNIEnv* env, SkStreamRewindable* stream, jobject padding, jobject options) {
// This function takes ownership of the input stream. Since the SkAndroidCodec
// will take ownership of the stream, we don't necessarily need to take ownership
// here. This is a precaution - if we were to return before creating the codec,
// we need to make sure that we delete the stream.
std::unique_ptr<SkStreamRewindable> streamDeleter(stream);
// Set default values for the options parameters.
int sampleSize = 1;
bool onlyDecodeSize = false;
SkColorType prefColorType = kN32_SkColorType;
bool isHardware = false;
bool isMutable = false;
float scale = 1.0f;
bool requireUnpremultiplied = false;
jobject javaBitmap = NULL;
sk_sp<SkColorSpace> prefColorSpace = nullptr;
// Update with options supplied by the client.
if (options != NULL) {
sampleSize = env->GetIntField(options, gOptions_sampleSizeFieldID);
// Correct a non-positive sampleSize. sampleSize defaults to zero within the
// options object, which is strange.
if (sampleSize <= 0) {
sampleSize = 1;
}
if (env->GetBooleanField(options, gOptions_justBoundsFieldID)) {
onlyDecodeSize = true;
}
// initialize these, in case we fail later on
env->SetIntField(options, gOptions_widthFieldID, -1);
env->SetIntField(options, gOptions_heightFieldID, -1);
env->SetObjectField(options, gOptions_mimeFieldID, 0);
env->SetObjectField(options, gOptions_outConfigFieldID, 0);
env->SetObjectField(options, gOptions_outColorSpaceFieldID, 0);
jobject jconfig = env->GetObjectField(options, gOptions_configFieldID);
prefColorType = GraphicsJNI::getNativeBitmapColorType(env, jconfig);
jobject jcolorSpace = env->GetObjectField(options, gOptions_colorSpaceFieldID);
prefColorSpace = GraphicsJNI::getNativeColorSpace(env, jcolorSpace);
isHardware = GraphicsJNI::isHardwareConfig(env, jconfig);
isMutable = env->GetBooleanField(options, gOptions_mutableFieldID);
requireUnpremultiplied = !env->GetBooleanField(options, gOptions_premultipliedFieldID);
javaBitmap = env->GetObjectField(options, gOptions_bitmapFieldID);
if (env->GetBooleanField(options, gOptions_scaledFieldID)) {
const int density = env->GetIntField(options, gOptions_densityFieldID);
const int targetDensity = env->GetIntField(options, gOptions_targetDensityFieldID);
const int screenDensity = env->GetIntField(options, gOptions_screenDensityFieldID);
if (density != 0 && targetDensity != 0 && density != screenDensity) {
scale = (float) targetDensity / density;
}
}
}
if (isMutable && isHardware) {
doThrowIAE(env, "Bitmaps with Config.HARWARE are always immutable");
return nullObjectReturn("Cannot create mutable hardware bitmap");
}
// Create the codec.
NinePatchPeeker peeker;
std::unique_ptr<SkAndroidCodec> codec(SkAndroidCodec::NewFromStream(
streamDeleter.release(), &peeker));
if (!codec.get()) {
return nullObjectReturn("SkAndroidCodec::NewFromStream returned null");
}
// Do not allow ninepatch decodes to 565. In the past, decodes to 565
// would dither, and we do not want to pre-dither ninepatches, since we
// know that they will be stretched. We no longer dither 565 decodes,
// but we continue to prevent ninepatches from decoding to 565, in order
// to maintain the old behavior.
if (peeker.mPatch && kRGB_565_SkColorType == prefColorType) {
prefColorType = kN32_SkColorType;
}
// Determine the output size.
SkISize size = codec->getSampledDimensions(sampleSize);
int scaledWidth = size.width();
int scaledHeight = size.height();
bool willScale = false;
// Apply a fine scaling step if necessary.
if (needsFineScale(codec->getInfo().dimensions(), size, sampleSize)) {
willScale = true;
scaledWidth = codec->getInfo().width() / sampleSize;
scaledHeight = codec->getInfo().height() / sampleSize;
}
// Set the decode colorType
SkColorType decodeColorType = codec->computeOutputColorType(prefColorType);
sk_sp<SkColorSpace> decodeColorSpace = codec->computeOutputColorSpace(
decodeColorType, prefColorSpace);
// Set the options and return if the client only wants the size.
if (options != NULL) {
jstring mimeType = encodedFormatToString(
env, (SkEncodedImageFormat)codec->getEncodedFormat());
if (env->ExceptionCheck()) {
return nullObjectReturn("OOM in encodedFormatToString()");
}
env->SetIntField(options, gOptions_widthFieldID, scaledWidth);
env->SetIntField(options, gOptions_heightFieldID, scaledHeight);
env->SetObjectField(options, gOptions_mimeFieldID, mimeType);
SkColorType outColorType = decodeColorType;
// Scaling can affect the output color type
if (willScale || scale != 1.0f) {
outColorType = colorTypeForScaledOutput(outColorType);
}
jint configID = GraphicsJNI::colorTypeToLegacyBitmapConfig(outColorType);
if (isHardware) {
configID = GraphicsJNI::kHardware_LegacyBitmapConfig;
}
jobject config = env->CallStaticObjectMethod(gBitmapConfig_class,
gBitmapConfig_nativeToConfigMethodID, configID);
env->SetObjectField(options, gOptions_outConfigFieldID, config);
env->SetObjectField(options, gOptions_outColorSpaceFieldID,
GraphicsJNI::getColorSpace(env, decodeColorSpace, decodeColorType));
if (onlyDecodeSize) {
return nullptr;
}
}
// Scale is necessary due to density differences.
if (scale != 1.0f) {
willScale = true;
scaledWidth = static_cast<int>(scaledWidth * scale + 0.5f);
scaledHeight = static_cast<int>(scaledHeight * scale + 0.5f);
}
android::Bitmap* reuseBitmap = nullptr;
unsigned int existingBufferSize = 0;
if (javaBitmap != NULL) {
reuseBitmap = &bitmap::toBitmap(env, javaBitmap);
if (reuseBitmap->isImmutable()) {
ALOGW("Unable to reuse an immutable bitmap as an image decoder target.");
javaBitmap = NULL;
reuseBitmap = nullptr;
} else {
existingBufferSize = bitmap::getBitmapAllocationByteCount(env, javaBitmap);
}
}
HeapAllocator defaultAllocator;
RecyclingPixelAllocator recyclingAllocator(reuseBitmap, existingBufferSize);
ScaleCheckingAllocator scaleCheckingAllocator(scale, existingBufferSize);
SkBitmap::HeapAllocator heapAllocator;
SkBitmap::Allocator* decodeAllocator;
if (javaBitmap != nullptr && willScale) {
// This will allocate pixels using a HeapAllocator, since there will be an extra
// scaling step that copies these pixels into Java memory. This allocator
// also checks that the recycled javaBitmap is large enough.
decodeAllocator = &scaleCheckingAllocator;
} else if (javaBitmap != nullptr) {
decodeAllocator = &recyclingAllocator;
} else if (willScale || isHardware) {
// This will allocate pixels using a HeapAllocator,
// for scale case: there will be an extra scaling step.
// for hardware case: there will be extra swizzling & upload to gralloc step.
decodeAllocator = &heapAllocator;
} else {
decodeAllocator = &defaultAllocator;
}
// Construct a color table for the decode if necessary
sk_sp<SkColorTable> colorTable(nullptr);
SkPMColor* colorPtr = nullptr;
int* colorCount = nullptr;
int maxColors = 256;
SkPMColor colors[256];
if (kIndex_8_SkColorType == decodeColorType) {
colorTable.reset(new SkColorTable(colors, maxColors));
// SkColorTable expects us to initialize all of the colors before creating an
// SkColorTable. However, we are using SkBitmap with an Allocator to allocate
// memory for the decode, so we need to create the SkColorTable before decoding.
// It is safe for SkAndroidCodec to modify the colors because this SkBitmap is
// not being used elsewhere.
colorPtr = const_cast<SkPMColor*>(colorTable->readColors());
colorCount = &maxColors;
}
SkAlphaType alphaType = codec->computeOutputAlphaType(requireUnpremultiplied);
const SkImageInfo decodeInfo = SkImageInfo::Make(size.width(), size.height(),
decodeColorType, alphaType, decodeColorSpace);
// For wide gamut images, we will leave the color space on the SkBitmap. Otherwise,
// use the default.
SkImageInfo bitmapInfo = decodeInfo;
if (decodeInfo.colorSpace() && decodeInfo.colorSpace()->isSRGB()) {
bitmapInfo = bitmapInfo.makeColorSpace(GraphicsJNI::colorSpaceForType(decodeColorType));
}
if (decodeColorType == kGray_8_SkColorType) {
// The legacy implementation of BitmapFactory used kAlpha8 for
// grayscale images (before kGray8 existed). While the codec
// recognizes kGray8, we need to decode into a kAlpha8 bitmap
// in order to avoid a behavior change.
bitmapInfo =
bitmapInfo.makeColorType(kAlpha_8_SkColorType).makeAlphaType(kPremul_SkAlphaType);
}
SkBitmap decodingBitmap;
if (!decodingBitmap.setInfo(bitmapInfo) ||
!decodingBitmap.tryAllocPixels(decodeAllocator, colorTable.get())) {
// SkAndroidCodec should recommend a valid SkImageInfo, so setInfo()
// should only only fail if the calculated value for rowBytes is too
// large.
// tryAllocPixels() can fail due to OOM on the Java heap, OOM on the
// native heap, or the recycled javaBitmap being too small to reuse.
return nullptr;
}
// Use SkAndroidCodec to perform the decode.
SkAndroidCodec::AndroidOptions codecOptions;
codecOptions.fZeroInitialized = decodeAllocator == &defaultAllocator ?
SkCodec::kYes_ZeroInitialized : SkCodec::kNo_ZeroInitialized;
codecOptions.fColorPtr = colorPtr;
codecOptions.fColorCount = colorCount;
codecOptions.fSampleSize = sampleSize;
SkCodec::Result result = codec->getAndroidPixels(decodeInfo, decodingBitmap.getPixels(),
decodingBitmap.rowBytes(), &codecOptions);
switch (result) {
case SkCodec::kSuccess:
case SkCodec::kIncompleteInput:
break;
default:
return nullObjectReturn("codec->getAndroidPixels() failed.");
}
jbyteArray ninePatchChunk = NULL;
if (peeker.mPatch != NULL) {
if (willScale) {
scaleNinePatchChunk(peeker.mPatch, scale, scaledWidth, scaledHeight);
}
size_t ninePatchArraySize = peeker.mPatch->serializedSize();
ninePatchChunk = env->NewByteArray(ninePatchArraySize);
if (ninePatchChunk == NULL) {
return nullObjectReturn("ninePatchChunk == null");
}
jbyte* array = (jbyte*) env->GetPrimitiveArrayCritical(ninePatchChunk, NULL);
if (array == NULL) {
return nullObjectReturn("primitive array == null");
}
memcpy(array, peeker.mPatch, peeker.mPatchSize);
env->ReleasePrimitiveArrayCritical(ninePatchChunk, array, 0);
}
jobject ninePatchInsets = NULL;
if (peeker.mHasInsets) {
ninePatchInsets = env->NewObject(gInsetStruct_class, gInsetStruct_constructorMethodID,
peeker.mOpticalInsets[0], peeker.mOpticalInsets[1],
peeker.mOpticalInsets[2], peeker.mOpticalInsets[3],
peeker.mOutlineInsets[0], peeker.mOutlineInsets[1],
peeker.mOutlineInsets[2], peeker.mOutlineInsets[3],
peeker.mOutlineRadius, peeker.mOutlineAlpha, scale);
if (ninePatchInsets == NULL) {
return nullObjectReturn("nine patch insets == null");
}
if (javaBitmap != NULL) {
env->SetObjectField(javaBitmap, gBitmap_ninePatchInsetsFieldID, ninePatchInsets);
}
}
SkBitmap outputBitmap;
if (willScale) {
// This is weird so let me explain: we could use the scale parameter
// directly, but for historical reasons this is how the corresponding
// Dalvik code has always behaved. We simply recreate the behavior here.
// The result is slightly different from simply using scale because of
// the 0.5f rounding bias applied when computing the target image size
const float sx = scaledWidth / float(decodingBitmap.width());
const float sy = scaledHeight / float(decodingBitmap.height());
// Set the allocator for the outputBitmap.
SkBitmap::Allocator* outputAllocator;
if (javaBitmap != nullptr) {
outputAllocator = &recyclingAllocator;
} else {
outputAllocator = &defaultAllocator;
}
SkColorType scaledColorType = colorTypeForScaledOutput(decodingBitmap.colorType());
// FIXME: If the alphaType is kUnpremul and the image has alpha, the
// colors may not be correct, since Skia does not yet support drawing
// to/from unpremultiplied bitmaps.
outputBitmap.setInfo(
bitmapInfo.makeWH(scaledWidth, scaledHeight).makeColorType(scaledColorType));
if (!outputBitmap.tryAllocPixels(outputAllocator, NULL)) {
// This should only fail on OOM. The recyclingAllocator should have
// enough memory since we check this before decoding using the
// scaleCheckingAllocator.
return nullObjectReturn("allocation failed for scaled bitmap");
}
SkPaint paint;
// kSrc_Mode instructs us to overwrite the uninitialized pixels in
// outputBitmap. Otherwise we would blend by default, which is not
// what we want.
paint.setBlendMode(SkBlendMode::kSrc);
paint.setFilterQuality(kLow_SkFilterQuality); // bilinear filtering
SkCanvas canvas(outputBitmap, SkCanvas::ColorBehavior::kLegacy);
canvas.scale(sx, sy);
canvas.drawBitmap(decodingBitmap, 0.0f, 0.0f, &paint);
} else {
outputBitmap.swap(decodingBitmap);
}
if (padding) {
if (peeker.mPatch != NULL) {
GraphicsJNI::set_jrect(env, padding,
peeker.mPatch->paddingLeft, peeker.mPatch->paddingTop,
peeker.mPatch->paddingRight, peeker.mPatch->paddingBottom);
} else {
GraphicsJNI::set_jrect(env, padding, -1, -1, -1, -1);
}
}
// If we get here, the outputBitmap should have an installed pixelref.
if (outputBitmap.pixelRef() == NULL) {
return nullObjectReturn("Got null SkPixelRef");
}
if (!isMutable && javaBitmap == NULL) {
// promise we will never change our pixels (great for sharing and pictures)
outputBitmap.setImmutable();
}
bool isPremultiplied = !requireUnpremultiplied;
if (javaBitmap != nullptr) {
bitmap::reinitBitmap(env, javaBitmap, outputBitmap.info(), isPremultiplied);
outputBitmap.notifyPixelsChanged();
// If a java bitmap was passed in for reuse, pass it back
return javaBitmap;
}
int bitmapCreateFlags = 0x0;
if (isMutable) bitmapCreateFlags |= android::bitmap::kBitmapCreateFlag_Mutable;
if (isPremultiplied) bitmapCreateFlags |= android::bitmap::kBitmapCreateFlag_Premultiplied;
if (isHardware) {
sk_sp<Bitmap> hardwareBitmap = Bitmap::allocateHardwareBitmap(outputBitmap);
return bitmap::createBitmap(env, hardwareBitmap.release(), bitmapCreateFlags,
ninePatchChunk, ninePatchInsets, -1);
}
// now create the java bitmap
return bitmap::createBitmap(env, defaultAllocator.getStorageObjAndReset(),
bitmapCreateFlags, ninePatchChunk, ninePatchInsets, -1);
}
Looking at the snippet below, we can see that scale is computed as the ratio of targetDensity to density, and the final bitmap is scaled by exactly that factor.
if (env->GetBooleanField(options, gOptions_scaledFieldID)) {
    const int density = env->GetIntField(options, gOptions_densityFieldID); // dpi of the folder the image lives in, e.g. drawable-xxhdpi corresponds to 480
    const int targetDensity = env->GetIntField(options, gOptions_targetDensityFieldID); // dpi of the actual device
    const int screenDensity = env->GetIntField(options, gOptions_screenDensityFieldID);
    if (density != 0 && targetDensity != 0 && density != screenDensity) {
        scale = (float) targetDensity / density;
    }
}
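Plugging the article's environment into this formula: the image lives in mipmap-xhdpi, so density is 320, targetDensity is the device's 284, and scale = 284 / 320 = 0.8875. doDecode later applies this scale with a +0.5f rounding bias; the small sketch below (redone in Java, not code from the platform) shows how 1920*1200 becomes 1704*1065.

// Sketch of the rounding doDecode performs, using this article's numbers.
float scale = 284f / 320f;                        // targetDensity / density = 0.8875
int scaledWidth  = (int) (1920 * scale + 0.5f);   // 1704
int scaledHeight = (int) (1200 * scale + 0.5f);   // 1065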
The density fields are initialized in BitmapFactory's decodeResourceStream():
public static Bitmap decodeResourceStream(@Nullable Resources res, @Nullable TypedValue value,
        @Nullable InputStream is, @Nullable Rect pad, @Nullable Options opts) {
    validate(opts);
    if (opts == null) {
        opts = new Options();
    }
    if (opts.inDensity == 0 && value != null) {
        final int density = value.density;
        if (density == TypedValue.DENSITY_DEFAULT) {
            opts.inDensity = DisplayMetrics.DENSITY_DEFAULT;
        } else if (density != TypedValue.DENSITY_NONE) {
            opts.inDensity = density;
        }
    }
    if (opts.inTargetDensity == 0 && res != null) {
        opts.inTargetDensity = res.getDisplayMetrics().densityDpi;
    }
    return decodeStream(is, pad, opts);
}
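This also means the scaling is entirely driven by the Options object: if inDensity equals inTargetDensity, or inScaled is false, scale stays at 1.0f. A minimal sketch using the standard BitmapFactory.Options fields (not code from the platform sources quoted above):

// Decode the same resource without density scaling: the resulting bitmap keeps
// the original 1920*1200 pixel dimensions.
BitmapFactory.Options opts = new BitmapFactory.Options();
opts.inScaled = false;  // skip the targetDensity / density scale step
Bitmap raw = BitmapFactory.decodeResource(getResources(), R.mipmap.hero, opts);
// Equivalently, forcing inDensity == inTargetDensity yields scale == 1.0f.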
To summarize the code flow above:
1. Parse the parameters passed down from the Java-level Options, such as sampleSize, isMutable, javaBitmap, and so on, and compute scale at the same time.
2. Get the dimensions of the image, decide from sampleSize whether downsampling is needed, and compute scaledWidth and scaledHeight.
3. Write scaledWidth and scaledHeight back into options; if the caller only wants the dimensions (options.inJustDecodeBounds = true), return right here. Note that what is returned in this case is the size of the resource image, not the final Bitmap's size, because the density scale has not been applied yet (a common misconception; see the sketch after this list).
4. Create the native SkImageInfo and SkBitmap, call tryAllocPixels to allocate the pixel memory, then call getAndroidPixels to decode the pixel data. The decodingBitmap here is still not the Bitmap that is finally returned; it is the bitmap of the original resource image.
5. Build the outputBitmap to be returned: if scaling is needed, allocate a new block of memory; if not, just call swap. Finally, check whether a reusable javaBitmap was passed in. If so, call reinitBitmap and return it directly; otherwise call createBitmap to create a new Bitmap.
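The misconception in point 3, and the memory cost that gives this section its title, can both be observed from the app side. A rough sketch, assuming the same R.mipmap.hero resource and the 284-dpi device used throughout this article:

// Query the bounds only: density scaling has not been applied at this point.
BitmapFactory.Options opts = new BitmapFactory.Options();
opts.inJustDecodeBounds = true;
BitmapFactory.decodeResource(getResources(), R.mipmap.hero, opts);
Log.d("BitmapSize", "outWidth x outHeight = " + opts.outWidth + " x " + opts.outHeight);
// -> 1920 x 1200: the resource's own size, not the final Bitmap's size.

// Full decode: the density-scaled bitmap determines the actual memory footprint.
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.hero);
Log.d("BitmapSize", "bitmap = " + bitmap.getWidth() + " x " + bitmap.getHeight()
        + ", bytes = " + bitmap.getByteCount());
// -> 1704 x 1065; with the default ARGB_8888 config the pixel memory is
//    1704 * 1065 * 4 = 7,259,040 bytes (about 6.9 MB).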