separate iOS project from main project
Example/Pods/YYImage/LICENSE (generated, Normal file, 22 lines added)
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2015 ibireme <ibireme@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Example/Pods/YYImage/README.md (generated, Executable file, 384 lines added)
@@ -0,0 +1,384 @@
YYImage
==============

[License](https://raw.githubusercontent.com/ibireme/YYImage/master/LICENSE)
[Carthage compatible](https://github.com/Carthage/Carthage)
[CocoaPods](http://cocoapods.org/?q= YYImage)
[Platform: iOS](https://www.apple.com/nl/ios/)
[Build status](https://travis-ci.org/ibireme/YYImage)

Image framework for iOS to display/encode/decode animated WebP, APNG, GIF, and more.<br/>
(It's a component of [YYKit](https://github.com/ibireme/YYKit))

Features
==============
- Display/encode/decode animated images of these types:<br/> WebP, APNG, GIF.
- Display/encode/decode still images of these types:<br/> WebP, PNG, GIF, JPEG, JP2, TIFF, BMP, ICO, ICNS.
- Baseline/progressive/interlaced decoding of these types:<br/> PNG, GIF, JPEG, BMP.
- Display frame-based image animation and sprite sheet animation.
- Dynamic memory buffer for lower memory usage.
- Fully compatible with the UIImage and UIImageView classes.
- Extendable protocol for custom image animation.
- Fully documented.

Usage
==============

### Display animated image

    // File: ani@3x.gif
    UIImage *image = [YYImage imageNamed:@"ani.gif"];
    UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
    [self.view addSubview:imageView];

### Display frame animation

    // Files: frame1.png, frame2.png, frame3.png
    NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"];
    NSArray *times = @[@0.1, @0.2, @0.1];
    UIImage *image = [[YYFrameImage alloc] initWithImagePaths:paths frameDurations:times repeats:YES];
    UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
    [self.view addSubview:imageView];

### Display sprite sheet animation

    // 8 * 12 sprites in a single sheet image
    UIImage *spriteSheet = [UIImage imageNamed:@"sprite-sheet"];
    NSMutableArray *contentRects = [NSMutableArray new];
    NSMutableArray *durations = [NSMutableArray new];
    for (int j = 0; j < 12; j++) {
        for (int i = 0; i < 8; i++) {
            CGRect rect;
            rect.size = CGSizeMake(spriteSheet.size.width / 8, spriteSheet.size.height / 12);
            rect.origin.x = spriteSheet.size.width / 8 * i;
            rect.origin.y = spriteSheet.size.height / 12 * j;
            [contentRects addObject:[NSValue valueWithCGRect:rect]];
            [durations addObject:@(1 / 60.0)];
        }
    }
    YYSpriteSheetImage *sprite;
    sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:spriteSheet
                                                     contentRects:contentRects
                                                   frameDurations:durations
                                                        loopCount:0];
    YYAnimatedImageView *imageView = [YYAnimatedImageView new];
    imageView.frame = CGRectMake(0, 0, spriteSheet.size.width / 8, spriteSheet.size.height / 12);
    imageView.image = sprite;
    [self.view addSubview:imageView];

### Animation control

    YYAnimatedImageView *imageView = ...;
    // pause:
    [imageView stopAnimating];
    // play:
    [imageView startAnimating];
    // set frame index:
    imageView.currentAnimatedImageIndex = 12;
    // get current playback status:
    BOOL isPlaying = imageView.currentIsPlayingAnimation;
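
Both `currentAnimatedImageIndex` and `currentIsPlayingAnimation` support key-value observing (this is noted in the translated Chinese introduction below), so playback state can be observed instead of polled. A minimal sketch, assuming a view controller that owns the image view; the observer wiring is illustrative and not part of the YYImage API:

    // Observe frame-index changes (the key path is the property name above).
    [imageView addObserver:self
                forKeyPath:@"currentAnimatedImageIndex"
                   options:NSKeyValueObservingOptionNew
                   context:NULL];

    - (void)observeValueForKeyPath:(NSString *)keyPath
                          ofObject:(id)object
                            change:(NSDictionary *)change
                           context:(void *)context {
        if ([keyPath isEqualToString:@"currentAnimatedImageIndex"]) {
            NSLog(@"current frame: %@", change[NSKeyValueChangeNewKey]);
        }
    }

    // Remove the observer when done, e.g. in -dealloc:
    [imageView removeObserver:self forKeyPath:@"currentAnimatedImageIndex"];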

### Image decoder

    // Decode a single frame:
    NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"];
    YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;

    // Progressive decoding:
    NSMutableData *data = [NSMutableData new];
    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0];
    while (newDataArrived) {
        [data appendData:newData];
        [decoder updateData:data final:NO];
        if (decoder.frameCount > 0) {
            UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
            // progressive display...
        }
    }
    [decoder updateData:data final:YES];
    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
    // final display...

### Image encoder

    // Encode a still image:
    YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG];
    jpegEncoder.quality = 0.9;
    [jpegEncoder addImage:image duration:0];
    NSData *jpegData = [jpegEncoder encode];

    // Encode an animated image:
    YYImageEncoder *webpEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP];
    webpEncoder.loopCount = 5;
    [webpEncoder addImage:image0 duration:0.1];
    [webpEncoder addImage:image1 duration:0.15];
    [webpEncoder addImage:image2 duration:0.2];
    NSData *webpData = [webpEncoder encode];

### Image type detection

    // Get the image type from image data:
    YYImageType type = YYImageDetectType(data);
    if (type == YYImageTypePNG) ...

Installation
==============

### CocoaPods

1. Update CocoaPods to the latest version.
2. Add `pod 'YYImage'` to your Podfile.
3. Run `pod install` or `pod update`.
4. Import \<YYImage/YYImage.h\>.
5. Note: the pod does not include the WebP subspec by default; to support the WebP format, add `pod 'YYImage/WebP'` to your Podfile.

### Carthage

1. Add `github "ibireme/YYImage"` to your Cartfile.
2. Run `carthage update --platform ios` and add the framework to your project.
3. Import \<YYImage/YYImage.h\>.
4. Note: the Carthage framework does not include the WebP component; to support the WebP format, use CocoaPods or install manually.

### Manually

1. Download all the files in the YYImage subdirectory.
2. Add the source files to your Xcode project.
3. Link with the required frameworks:
    * UIKit
    * CoreFoundation
    * QuartzCore
    * AssetsLibrary
    * ImageIO
    * Accelerate
    * MobileCoreServices
    * libz
4. Import `YYImage.h`.
5. Note: to support the WebP format, add `Vendor/WebP.framework` (static library) to your Xcode project.

FAQ
==============
_Q: Why can't I display WebP images?_

A: Make sure you have added `WebP.framework` to your project. You can call `YYImageWebPAvailable()` to check whether the WebP subspec is installed correctly, as shown in the sketch below.
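
For example (a minimal sketch; the log message is illustrative):

    if (!YYImageWebPAvailable()) {
        NSLog(@"WebP support is not available; add `pod 'YYImage/WebP'` or WebP.framework.");
    }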

_Q: Why can't I play APNG animations?_

A: Disable `Compress PNG Files` and `Remove Text Metadata From PNG Files` in your project's build settings, or rename your APNG files with the `apng` extension.

Documentation
==============
Full API documentation is available on [CocoaDocs](http://cocoadocs.org/docsets/YYImage/).<br/>
You can also install the documentation locally using [appledoc](https://github.com/tomaz/appledoc).


Requirements
==============
This library requires `iOS 6.0+` and `Xcode 7.0+`.


License
==============
YYImage is provided under the MIT license. See the LICENSE file for details.


<br/><br/>
---
Chinese Introduction (translated)
==============
YYImage: a powerful image framework for iOS.<br/>
(This project is a component of [YYKit](https://github.com/ibireme/YYKit).)

Features
==============
- Play/encode/decode animated images of the following types:<br/>
  WebP, APNG, GIF.
- Display/encode/decode still images of the following types:<br/>
  WebP, PNG, GIF, JPEG, JP2, TIFF, BMP, ICO, ICNS.
- Baseline/progressive/interlaced decoding of the following types:<br/>
  PNG, GIF, JPEG, BMP.
- Frame animation built from multiple images, and sprite sheet animation from a single image.
- Efficient dynamic memory buffer management for high-performance, low-memory animation playback.
- Fully compatible with UIImage and UIImageView, and easy to use.
- Extendable protocol for custom animations.
- Every class and method is fully documented.


Usage
==============

### Display animated image

    // File: ani@3x.gif
    UIImage *image = [YYImage imageNamed:@"ani.gif"];
    UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
    [self.view addSubview:imageView];

### Display frame animation

    // Files: frame1.png, frame2.png, frame3.png
    NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"];
    NSArray *times = @[@0.1, @0.2, @0.1];
    UIImage *image = [[YYFrameImage alloc] initWithImagePaths:paths frameDurations:times repeats:YES];
    UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
    [self.view addSubview:imageView];

### Display sprite sheet animation

    // 8 * 12 sprites in a single sheet image
    UIImage *spriteSheet = [UIImage imageNamed:@"sprite-sheet"];
    NSMutableArray *contentRects = [NSMutableArray new];
    NSMutableArray *durations = [NSMutableArray new];
    for (int j = 0; j < 12; j++) {
        for (int i = 0; i < 8; i++) {
            CGRect rect;
            rect.size = CGSizeMake(spriteSheet.size.width / 8, spriteSheet.size.height / 12);
            rect.origin.x = spriteSheet.size.width / 8 * i;
            rect.origin.y = spriteSheet.size.height / 12 * j;
            [contentRects addObject:[NSValue valueWithCGRect:rect]];
            [durations addObject:@(1 / 60.0)];
        }
    }
    YYSpriteSheetImage *sprite;
    sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:spriteSheet
                                                     contentRects:contentRects
                                                   frameDurations:durations
                                                        loopCount:0];
    YYAnimatedImageView *imageView = [YYAnimatedImageView new];
    imageView.frame = CGRectMake(0, 0, spriteSheet.size.width / 8, spriteSheet.size.height / 12);
    imageView.image = sprite;
    [self.view addSubview:imageView];

### Animation control

    YYAnimatedImageView *imageView = ...;
    // pause:
    [imageView stopAnimating];
    // play:
    [imageView startAnimating];
    // set playback position (frame index):
    imageView.currentAnimatedImageIndex = 12;
    // get playback status:
    BOOL isPlaying = imageView.currentIsPlayingAnimation;
    // Both of the properties above support KVO.

### Image decoder

    // Decode a single frame:
    NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"];
    YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;

    // Progressive decoding (useful for displaying an image while it downloads):
    NSMutableData *data = [NSMutableData new];
    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0];
    while (newDataArrived) {
        [data appendData:newData];
        [decoder updateData:data final:NO];
        if (decoder.frameCount > 0) {
            UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
            // progressive display...
        }
    }
    [decoder updateData:data final:YES];
    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
    // final display...

### Image encoder

    // Encode a still image (all common formats are supported):
    YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG];
    jpegEncoder.quality = 0.9;
    [jpegEncoder addImage:image duration:0];
    NSData *jpegData = [jpegEncoder encode];

    // Encode an animated image (GIF/APNG/WebP):
    YYImageEncoder *webpEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP];
    webpEncoder.loopCount = 5;
    [webpEncoder addImage:image0 duration:0.1];
    [webpEncoder addImage:image1 duration:0.15];
    [webpEncoder addImage:image2 duration:0.2];
    NSData *webpData = [webpEncoder encode];

### Image type detection

    // Get the image type from image data:
    YYImageType type = YYImageDetectType(data);
    if (type == YYImageTypePNG) ...


Installation
==============

### CocoaPods

1. Update CocoaPods to the latest version.
2. Add `pod 'YYImage'` to your Podfile.
3. Run `pod install` or `pod update`.
4. Import \<YYImage/YYImage.h\>.
5. Note: the pod does not include the WebP component by default; to support WebP, add `pod 'YYImage/WebP'` to your Podfile.

### Carthage

1. Add `github "ibireme/YYImage"` to your Cartfile.
2. Run `carthage update --platform ios` and add the generated framework to your project.
3. Import \<YYImage/YYImage.h\>.
4. Note: the Carthage framework does not include the WebP component; to support WebP, install with CocoaPods or manually.

### Manually

1. Download everything in the YYImage subdirectory.
2. Add (drag and drop) the YYImage source files to your Xcode project.
3. Link with the following frameworks:
    * UIKit
    * CoreFoundation
    * QuartzCore
    * AssetsLibrary
    * ImageIO
    * Accelerate
    * MobileCoreServices
    * libz
4. Import `YYImage.h`.
5. Note: to support WebP, add `Vendor/WebP.framework` (static library) to your Xcode project.

FAQ
==============
_Q: Why can't I display WebP images?_

A: Make sure `WebP.framework` has been added to your project. You can call `YYImageWebPAvailable()` to check whether the WebP component is installed correctly.

_Q: Why can't I play APNG animations?_

A: Disable `Compress PNG Files` and `Remove Text Metadata From PNG Files` in Build Settings, or rename your APNG files with the `apng` extension.

Documentation
==============
Online API documentation is available on [CocoaDocs](http://cocoadocs.org/docsets/YYImage/); you can also generate the documentation locally with [appledoc](https://github.com/tomaz/appledoc).


Requirements
==============
This project requires `iOS 6.0+` and `Xcode 7.0+`.


License
==============
YYImage is provided under the MIT license. See the LICENSE file for details.


Related Links
==============
[A survey of image formats for mobile](http://blog.ibireme.com/2015/11/02/mobile_image_benchmark/)<br/>

[Some tips on handling images in iOS](http://blog.ibireme.com/2015/11/02/ios_image_tips/)
Example/Pods/YYImage/Vendor/WebP.framework/Headers/config.h (generated, vendored, Normal file, 145 lines added)
@@ -0,0 +1,145 @@
/* src/webp/config.h. Generated from config.h.in by configure. */
/* src/webp/config.h.in. Generated from configure.ac by autoheader. */

/* Define if building universal (internal helper macro) */
/* #undef AC_APPLE_UNIVERSAL_BUILD */

/* Set to 1 if __builtin_bswap16 is available */
#define HAVE_BUILTIN_BSWAP16 1

/* Set to 1 if __builtin_bswap32 is available */
#define HAVE_BUILTIN_BSWAP32 1

/* Set to 1 if __builtin_bswap64 is available */
#define HAVE_BUILTIN_BSWAP64 1

/* Define to 1 if you have the <dlfcn.h> header file. */
#define HAVE_DLFCN_H 1

/* Define to 1 if you have the <GLUT/glut.h> header file. */
/* #undef HAVE_GLUT_GLUT_H */

/* Define to 1 if you have the <GL/glut.h> header file. */
/* #undef HAVE_GL_GLUT_H */

/* Define to 1 if you have the <inttypes.h> header file. */
#define HAVE_INTTYPES_H 1

/* Define to 1 if you have the <memory.h> header file. */
#define HAVE_MEMORY_H 1

/* Define to 1 if you have the <OpenGL/glut.h> header file. */
/* #undef HAVE_OPENGL_GLUT_H */

/* Have PTHREAD_PRIO_INHERIT. */
#define HAVE_PTHREAD_PRIO_INHERIT 1

/* Define to 1 if you have the <shlwapi.h> header file. */
/* #undef HAVE_SHLWAPI_H */

/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1

/* Define to 1 if you have the <stdlib.h> header file. */
#define HAVE_STDLIB_H 1

/* Define to 1 if you have the <strings.h> header file. */
#define HAVE_STRINGS_H 1

/* Define to 1 if you have the <string.h> header file. */
#define HAVE_STRING_H 1

/* Define to 1 if you have the <sys/stat.h> header file. */
#define HAVE_SYS_STAT_H 1

/* Define to 1 if you have the <sys/types.h> header file. */
#define HAVE_SYS_TYPES_H 1

/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1

/* Define to 1 if you have the <wincodec.h> header file. */
/* #undef HAVE_WINCODEC_H */

/* Define to 1 if you have the <windows.h> header file. */
/* #undef HAVE_WINDOWS_H */

/* Define to the sub-directory in which libtool stores uninstalled libraries.
   */
#define LT_OBJDIR ".libs/"

/* Name of package */
#define PACKAGE "libwebp"

/* Define to the address where bug reports for this package should be sent. */
#define PACKAGE_BUGREPORT "https://bugs.chromium.org/p/webp"

/* Define to the full name of this package. */
#define PACKAGE_NAME "libwebp"

/* Define to the full name and version of this package. */
#define PACKAGE_STRING "libwebp 0.5.0"

/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "libwebp"

/* Define to the home page for this package. */
#define PACKAGE_URL "http://developers.google.com/speed/webp"

/* Define to the version of this package. */
#define PACKAGE_VERSION "0.5.0"

/* Define to necessary symbol if this constant uses a non-standard name on
   your system. */
/* #undef PTHREAD_CREATE_JOINABLE */

/* Define to 1 if you have the ANSI C header files. */
#define STDC_HEADERS 1

/* Version number of package */
#define VERSION "0.5.0"

/* Enable experimental code */
/* #undef WEBP_EXPERIMENTAL_FEATURES */

/* Define to 1 to force aligned memory operations */
/* #undef WEBP_FORCE_ALIGNED */

/* Set to 1 if AVX2 is supported */
/* #undef WEBP_HAVE_AVX2 */

/* Set to 1 if GIF library is installed */
/* #undef WEBP_HAVE_GIF */

/* Set to 1 if OpenGL is supported */
/* #undef WEBP_HAVE_GL */

/* Set to 1 if JPEG library is installed */
/* #undef WEBP_HAVE_JPEG */

/* Set to 1 if PNG library is installed */
/* #undef WEBP_HAVE_PNG */

/* Set to 1 if SSE2 is supported */
/* #undef WEBP_HAVE_SSE2 */

/* Set to 1 if SSE4.1 is supported */
/* #undef WEBP_HAVE_SSE41 */

/* Set to 1 if TIFF library is installed */
/* #undef WEBP_HAVE_TIFF */

/* Undefine this to disable thread support. */
#define WEBP_USE_THREAD 1

/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
#if defined AC_APPLE_UNIVERSAL_BUILD
# if defined __BIG_ENDIAN__
#  define WORDS_BIGENDIAN 1
# endif
#else
# ifndef WORDS_BIGENDIAN
/* # undef WORDS_BIGENDIAN */
# endif
#endif
Example/Pods/YYImage/Vendor/WebP.framework/Headers/decode.h (generated, vendored, Normal file, 488 lines added)
@@ -0,0 +1,488 @@
|
||||
// Copyright 2010 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// Main decoding functions for WebP images.
|
||||
//
|
||||
// Author: Skal (pascal.massimino@gmail.com)
|
||||
|
||||
#ifndef WEBP_WEBP_DECODE_H_
|
||||
#define WEBP_WEBP_DECODE_H_
|
||||
|
||||
#include "./types.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#define WEBP_DECODER_ABI_VERSION 0x0208 // MAJOR(8b) + MINOR(8b)
|
||||
|
||||
// Note: forward declaring enumerations is not allowed in (strict) C and C++,
|
||||
// the types are left here for reference.
|
||||
// typedef enum VP8StatusCode VP8StatusCode;
|
||||
// typedef enum WEBP_CSP_MODE WEBP_CSP_MODE;
|
||||
typedef struct WebPRGBABuffer WebPRGBABuffer;
|
||||
typedef struct WebPYUVABuffer WebPYUVABuffer;
|
||||
typedef struct WebPDecBuffer WebPDecBuffer;
|
||||
typedef struct WebPIDecoder WebPIDecoder;
|
||||
typedef struct WebPBitstreamFeatures WebPBitstreamFeatures;
|
||||
typedef struct WebPDecoderOptions WebPDecoderOptions;
|
||||
typedef struct WebPDecoderConfig WebPDecoderConfig;
|
||||
|
||||
// Return the decoder's version number, packed in hexadecimal using 8bits for
|
||||
// each of major/minor/revision. E.g: v2.5.7 is 0x020507.
|
||||
WEBP_EXTERN(int) WebPGetDecoderVersion(void);
|
||||
|
||||
// Retrieve basic header information: width, height.
|
||||
// This function will also validate the header and return 0 in
|
||||
// case of formatting error.
|
||||
// Pointers 'width' and 'height' can be passed NULL if deemed irrelevant.
|
||||
WEBP_EXTERN(int) WebPGetInfo(const uint8_t* data, size_t data_size,
|
||||
int* width, int* height);
|
||||
|
||||
// Decodes WebP images pointed to by 'data' and returns RGBA samples, along
|
||||
// with the dimensions in *width and *height. The ordering of samples in
|
||||
// memory is R, G, B, A, R, G, B, A... in scan order (endian-independent).
|
||||
// The returned pointer should be deleted calling WebPFree().
|
||||
// Returns NULL in case of error.
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeRGBA(const uint8_t* data, size_t data_size,
|
||||
int* width, int* height);
|
||||
|
||||
// Same as WebPDecodeRGBA, but returning A, R, G, B, A, R, G, B... ordered data.
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeARGB(const uint8_t* data, size_t data_size,
|
||||
int* width, int* height);
|
||||
|
||||
// Same as WebPDecodeRGBA, but returning B, G, R, A, B, G, R, A... ordered data.
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeBGRA(const uint8_t* data, size_t data_size,
|
||||
int* width, int* height);
|
||||
|
||||
// Same as WebPDecodeRGBA, but returning R, G, B, R, G, B... ordered data.
|
||||
// If the bitstream contains transparency, it is ignored.
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeRGB(const uint8_t* data, size_t data_size,
|
||||
int* width, int* height);
|
||||
|
||||
// Same as WebPDecodeRGB, but returning B, G, R, B, G, R... ordered data.
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeBGR(const uint8_t* data, size_t data_size,
|
||||
int* width, int* height);
|
||||
|
||||
|
||||
// Decode WebP images pointed to by 'data' to Y'UV format(*). The pointer
|
||||
// returned is the Y samples buffer. Upon return, *u and *v will point to
|
||||
// the U and V chroma data. These U and V buffers need NOT be passed to
|
||||
// WebPFree(), unlike the returned Y luma one. The dimension of the U and V
|
||||
// planes are both (*width + 1) / 2 and (*height + 1)/ 2.
|
||||
// Upon return, the Y buffer has a stride returned as '*stride', while U and V
|
||||
// have a common stride returned as '*uv_stride'.
|
||||
// Return NULL in case of error.
|
||||
// (*) Also named Y'CbCr. See: http://en.wikipedia.org/wiki/YCbCr
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeYUV(const uint8_t* data, size_t data_size,
|
||||
int* width, int* height,
|
||||
uint8_t** u, uint8_t** v,
|
||||
int* stride, int* uv_stride);
|
||||
|
||||
// Releases memory returned by the WebPDecode*() functions above.
|
||||
WEBP_EXTERN(void) WebPFree(void* ptr);
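// A minimal one-shot decoding sketch using the declarations above (illustrative
// only, not part of the upstream header); 'data' and 'data_size' are assumed to
// hold a complete WebP file:
//
//   int width, height;
//   if (WebPGetInfo(data, data_size, &width, &height)) {
//     uint8_t* rgba = WebPDecodeRGBA(data, data_size, &width, &height);
//     if (rgba != NULL) {
//       // ... use the width * height RGBA pixels ...
//       WebPFree(rgba);   // release the buffer returned by WebPDecodeRGBA()
//     }
//   }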
|
||||
|
||||
// These five functions are variants of the above ones, that decode the image
|
||||
// directly into a pre-allocated buffer 'output_buffer'. The maximum storage
|
||||
// available in this buffer is indicated by 'output_buffer_size'. If this
|
||||
// storage is not sufficient (or an error occurred), NULL is returned.
|
||||
// Otherwise, output_buffer is returned, for convenience.
|
||||
// The parameter 'output_stride' specifies the distance (in bytes)
|
||||
// between scanlines. Hence, output_buffer_size is expected to be at least
|
||||
// output_stride x picture-height.
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeRGBAInto(
|
||||
const uint8_t* data, size_t data_size,
|
||||
uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeARGBInto(
|
||||
const uint8_t* data, size_t data_size,
|
||||
uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeBGRAInto(
|
||||
const uint8_t* data, size_t data_size,
|
||||
uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
|
||||
|
||||
// RGB and BGR variants. Here too the transparency information, if present,
|
||||
// will be dropped and ignored.
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeRGBInto(
|
||||
const uint8_t* data, size_t data_size,
|
||||
uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeBGRInto(
|
||||
const uint8_t* data, size_t data_size,
|
||||
uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
|
||||
|
||||
// WebPDecodeYUVInto() is a variant of WebPDecodeYUV() that operates directly
|
||||
// into pre-allocated luma/chroma plane buffers. This function requires the
|
||||
// strides to be passed: one for the luma plane and one for each of the
|
||||
// chroma ones. The size of each plane buffer is passed as 'luma_size',
|
||||
// 'u_size' and 'v_size' respectively.
|
||||
// Pointer to the luma plane ('*luma') is returned or NULL if an error occurred
|
||||
// during decoding (or because some buffers were found to be too small).
|
||||
WEBP_EXTERN(uint8_t*) WebPDecodeYUVInto(
|
||||
const uint8_t* data, size_t data_size,
|
||||
uint8_t* luma, size_t luma_size, int luma_stride,
|
||||
uint8_t* u, size_t u_size, int u_stride,
|
||||
uint8_t* v, size_t v_size, int v_stride);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Output colorspaces and buffer
|
||||
|
||||
// Colorspaces
|
||||
// Note: the naming describes the byte-ordering of packed samples in memory.
|
||||
// For instance, MODE_BGRA relates to samples ordered as B,G,R,A,B,G,R,A,...
|
||||
// Non-capital names (e.g.:MODE_Argb) relates to pre-multiplied RGB channels.
|
||||
// RGBA-4444 and RGB-565 colorspaces are represented by following byte-order:
|
||||
// RGBA-4444: [r3 r2 r1 r0 g3 g2 g1 g0], [b3 b2 b1 b0 a3 a2 a1 a0], ...
|
||||
// RGB-565: [r4 r3 r2 r1 r0 g5 g4 g3], [g2 g1 g0 b4 b3 b2 b1 b0], ...
|
||||
// In the case WEBP_SWAP_16BITS_CSP is defined, the bytes are swapped for
|
||||
// these two modes:
|
||||
// RGBA-4444: [b3 b2 b1 b0 a3 a2 a1 a0], [r3 r2 r1 r0 g3 g2 g1 g0], ...
|
||||
// RGB-565: [g2 g1 g0 b4 b3 b2 b1 b0], [r4 r3 r2 r1 r0 g5 g4 g3], ...
|
||||
|
||||
typedef enum WEBP_CSP_MODE {
|
||||
MODE_RGB = 0, MODE_RGBA = 1,
|
||||
MODE_BGR = 2, MODE_BGRA = 3,
|
||||
MODE_ARGB = 4, MODE_RGBA_4444 = 5,
|
||||
MODE_RGB_565 = 6,
|
||||
// RGB-premultiplied transparent modes (alpha value is preserved)
|
||||
MODE_rgbA = 7,
|
||||
MODE_bgrA = 8,
|
||||
MODE_Argb = 9,
|
||||
MODE_rgbA_4444 = 10,
|
||||
// YUV modes must come after RGB ones.
|
||||
MODE_YUV = 11, MODE_YUVA = 12, // yuv 4:2:0
|
||||
MODE_LAST = 13
|
||||
} WEBP_CSP_MODE;
|
||||
|
||||
// Some useful macros:
|
||||
static WEBP_INLINE int WebPIsPremultipliedMode(WEBP_CSP_MODE mode) {
|
||||
return (mode == MODE_rgbA || mode == MODE_bgrA || mode == MODE_Argb ||
|
||||
mode == MODE_rgbA_4444);
|
||||
}
|
||||
|
||||
static WEBP_INLINE int WebPIsAlphaMode(WEBP_CSP_MODE mode) {
|
||||
return (mode == MODE_RGBA || mode == MODE_BGRA || mode == MODE_ARGB ||
|
||||
mode == MODE_RGBA_4444 || mode == MODE_YUVA ||
|
||||
WebPIsPremultipliedMode(mode));
|
||||
}
|
||||
|
||||
static WEBP_INLINE int WebPIsRGBMode(WEBP_CSP_MODE mode) {
|
||||
return (mode < MODE_YUV);
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// WebPDecBuffer: Generic structure for describing the output sample buffer.
|
||||
|
||||
struct WebPRGBABuffer { // view as RGBA
|
||||
uint8_t* rgba; // pointer to RGBA samples
|
||||
int stride; // stride in bytes from one scanline to the next.
|
||||
size_t size; // total size of the *rgba buffer.
|
||||
};
|
||||
|
||||
struct WebPYUVABuffer { // view as YUVA
|
||||
uint8_t* y, *u, *v, *a; // pointer to luma, chroma U/V, alpha samples
|
||||
int y_stride; // luma stride
|
||||
int u_stride, v_stride; // chroma strides
|
||||
int a_stride; // alpha stride
|
||||
size_t y_size; // luma plane size
|
||||
size_t u_size, v_size; // chroma planes size
|
||||
size_t a_size; // alpha-plane size
|
||||
};
|
||||
|
||||
// Output buffer
|
||||
struct WebPDecBuffer {
|
||||
WEBP_CSP_MODE colorspace; // Colorspace.
|
||||
int width, height; // Dimensions.
|
||||
int is_external_memory; // If true, 'internal_memory' pointer is not used.
|
||||
union {
|
||||
WebPRGBABuffer RGBA;
|
||||
WebPYUVABuffer YUVA;
|
||||
} u; // Nameless union of buffer parameters.
|
||||
uint32_t pad[4]; // padding for later use
|
||||
|
||||
uint8_t* private_memory; // Internally allocated memory (only when
|
||||
// is_external_memory is false). Should not be used
|
||||
// externally, but accessed via the buffer union.
|
||||
};
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(int) WebPInitDecBufferInternal(WebPDecBuffer*, int);
|
||||
|
||||
// Initialize the structure as empty. Must be called before any other use.
|
||||
// Returns false in case of version mismatch
|
||||
static WEBP_INLINE int WebPInitDecBuffer(WebPDecBuffer* buffer) {
|
||||
return WebPInitDecBufferInternal(buffer, WEBP_DECODER_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Free any memory associated with the buffer. Must always be called last.
|
||||
// Note: doesn't free the 'buffer' structure itself.
|
||||
WEBP_EXTERN(void) WebPFreeDecBuffer(WebPDecBuffer* buffer);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Enumeration of the status codes
|
||||
|
||||
typedef enum VP8StatusCode {
|
||||
VP8_STATUS_OK = 0,
|
||||
VP8_STATUS_OUT_OF_MEMORY,
|
||||
VP8_STATUS_INVALID_PARAM,
|
||||
VP8_STATUS_BITSTREAM_ERROR,
|
||||
VP8_STATUS_UNSUPPORTED_FEATURE,
|
||||
VP8_STATUS_SUSPENDED,
|
||||
VP8_STATUS_USER_ABORT,
|
||||
VP8_STATUS_NOT_ENOUGH_DATA
|
||||
} VP8StatusCode;
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Incremental decoding
|
||||
//
|
||||
// This API allows streamlined decoding of partial data.
|
||||
// Picture can be incrementally decoded as data become available thanks to the
|
||||
// WebPIDecoder object. This object can be left in a SUSPENDED state if the
|
||||
// picture is only partially decoded, pending additional input.
|
||||
// Code example:
|
||||
//
|
||||
// WebPInitDecBuffer(&buffer);
|
||||
// buffer.colorspace = mode;
|
||||
// ...
|
||||
// WebPIDecoder* idec = WebPINewDecoder(&buffer);
|
||||
// while (has_more_data) {
|
||||
// // ... (get additional data)
|
||||
// status = WebPIAppend(idec, new_data, new_data_size);
|
||||
// if (status != VP8_STATUS_SUSPENDED ||
|
||||
// break;
|
||||
// }
|
||||
//
|
||||
// // The above call decodes the current available buffer.
|
||||
// // Part of the image can now be refreshed by calling to
|
||||
// // WebPIDecGetRGB()/WebPIDecGetYUVA() etc.
|
||||
// }
|
||||
// WebPIDelete(idec);
|
||||
|
||||
// Creates a new incremental decoder with the supplied buffer parameter.
|
||||
// This output_buffer can be passed NULL, in which case a default output buffer
|
||||
// is used (with MODE_RGB). Otherwise, an internal reference to 'output_buffer'
|
||||
// is kept, which means that the lifespan of 'output_buffer' must be larger than
|
||||
// that of the returned WebPIDecoder object.
|
||||
// The supplied 'output_buffer' content MUST NOT be changed between calls to
|
||||
// WebPIAppend() or WebPIUpdate() unless 'output_buffer.is_external_memory' is
|
||||
// set to 1. In such a case, it is allowed to modify the pointers, size and
|
||||
// stride of output_buffer.u.RGBA or output_buffer.u.YUVA, provided they remain
|
||||
// within valid bounds.
|
||||
// All other fields of WebPDecBuffer MUST remain constant between calls.
|
||||
// Returns NULL if the allocation failed.
|
||||
WEBP_EXTERN(WebPIDecoder*) WebPINewDecoder(WebPDecBuffer* output_buffer);
|
||||
|
||||
// This function allocates and initializes an incremental-decoder object, which
|
||||
// will output the RGB/A samples specified by 'csp' into a preallocated
|
||||
// buffer 'output_buffer'. The size of this buffer is at least
|
||||
// 'output_buffer_size' and the stride (distance in bytes between two scanlines)
|
||||
// is specified by 'output_stride'.
|
||||
// Additionally, output_buffer can be passed NULL in which case the output
|
||||
// buffer will be allocated automatically when the decoding starts. The
|
||||
// colorspace 'csp' is taken into account for allocating this buffer. All other
|
||||
// parameters are ignored.
|
||||
// Returns NULL if the allocation failed, or if some parameters are invalid.
|
||||
WEBP_EXTERN(WebPIDecoder*) WebPINewRGB(
|
||||
WEBP_CSP_MODE csp,
|
||||
uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
|
||||
|
||||
// This function allocates and initializes an incremental-decoder object, which
|
||||
// will output the raw luma/chroma samples into a preallocated planes if
|
||||
// supplied. The luma plane is specified by its pointer 'luma', its size
|
||||
// 'luma_size' and its stride 'luma_stride'. Similarly, the chroma-u plane
|
||||
// is specified by the 'u', 'u_size' and 'u_stride' parameters, and the chroma-v
|
||||
// plane by 'v' and 'v_size'. And same for the alpha-plane. The 'a' pointer
|
||||
// can be pass NULL in case one is not interested in the transparency plane.
|
||||
// Conversely, 'luma' can be passed NULL if no preallocated planes are supplied.
|
||||
// In this case, the output buffer will be automatically allocated (using
|
||||
// MODE_YUVA) when decoding starts. All parameters are then ignored.
|
||||
// Returns NULL if the allocation failed or if a parameter is invalid.
|
||||
WEBP_EXTERN(WebPIDecoder*) WebPINewYUVA(
|
||||
uint8_t* luma, size_t luma_size, int luma_stride,
|
||||
uint8_t* u, size_t u_size, int u_stride,
|
||||
uint8_t* v, size_t v_size, int v_stride,
|
||||
uint8_t* a, size_t a_size, int a_stride);
|
||||
|
||||
// Deprecated version of the above, without the alpha plane.
|
||||
// Kept for backward compatibility.
|
||||
WEBP_EXTERN(WebPIDecoder*) WebPINewYUV(
|
||||
uint8_t* luma, size_t luma_size, int luma_stride,
|
||||
uint8_t* u, size_t u_size, int u_stride,
|
||||
uint8_t* v, size_t v_size, int v_stride);
|
||||
|
||||
// Deletes the WebPIDecoder object and associated memory. Must always be called
|
||||
// if WebPINewDecoder, WebPINewRGB or WebPINewYUV succeeded.
|
||||
WEBP_EXTERN(void) WebPIDelete(WebPIDecoder* idec);
|
||||
|
||||
// Copies and decodes the next available data. Returns VP8_STATUS_OK when
|
||||
// the image is successfully decoded. Returns VP8_STATUS_SUSPENDED when more
|
||||
// data is expected. Returns error in other cases.
|
||||
WEBP_EXTERN(VP8StatusCode) WebPIAppend(
|
||||
WebPIDecoder* idec, const uint8_t* data, size_t data_size);
|
||||
|
||||
// A variant of the above function to be used when data buffer contains
|
||||
// partial data from the beginning. In this case data buffer is not copied
|
||||
// to the internal memory.
|
||||
// Note that the value of the 'data' pointer can change between calls to
|
||||
// WebPIUpdate, for instance when the data buffer is resized to fit larger data.
|
||||
WEBP_EXTERN(VP8StatusCode) WebPIUpdate(
|
||||
WebPIDecoder* idec, const uint8_t* data, size_t data_size);
|
||||
|
||||
// Returns the RGB/A image decoded so far. Returns NULL if output params
|
||||
// are not initialized yet. The RGB/A output type corresponds to the colorspace
|
||||
// specified during call to WebPINewDecoder() or WebPINewRGB().
|
||||
// *last_y is the index of last decoded row in raster scan order. Some pointers
|
||||
// (*last_y, *width etc.) can be NULL if corresponding information is not
|
||||
// needed.
|
||||
WEBP_EXTERN(uint8_t*) WebPIDecGetRGB(
|
||||
const WebPIDecoder* idec, int* last_y,
|
||||
int* width, int* height, int* stride);
|
||||
|
||||
// Same as above function to get a YUVA image. Returns pointer to the luma
|
||||
// plane or NULL in case of error. If there is no alpha information
|
||||
// the alpha pointer '*a' will be returned NULL.
|
||||
WEBP_EXTERN(uint8_t*) WebPIDecGetYUVA(
|
||||
const WebPIDecoder* idec, int* last_y,
|
||||
uint8_t** u, uint8_t** v, uint8_t** a,
|
||||
int* width, int* height, int* stride, int* uv_stride, int* a_stride);
|
||||
|
||||
// Deprecated alpha-less version of WebPIDecGetYUVA(): it will ignore the
|
||||
// alpha information (if present). Kept for backward compatibility.
|
||||
static WEBP_INLINE uint8_t* WebPIDecGetYUV(
|
||||
const WebPIDecoder* idec, int* last_y, uint8_t** u, uint8_t** v,
|
||||
int* width, int* height, int* stride, int* uv_stride) {
|
||||
return WebPIDecGetYUVA(idec, last_y, u, v, NULL, width, height,
|
||||
stride, uv_stride, NULL);
|
||||
}
|
||||
|
||||
// Generic call to retrieve information about the displayable area.
|
||||
// If non NULL, the left/right/width/height pointers are filled with the visible
|
||||
// rectangular area so far.
|
||||
// Returns NULL in case the incremental decoder object is in an invalid state.
|
||||
// Otherwise returns the pointer to the internal representation. This structure
|
||||
// is read-only, tied to WebPIDecoder's lifespan and should not be modified.
|
||||
WEBP_EXTERN(const WebPDecBuffer*) WebPIDecodedArea(
|
||||
const WebPIDecoder* idec, int* left, int* top, int* width, int* height);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Advanced decoding parametrization
|
||||
//
|
||||
// Code sample for using the advanced decoding API
|
||||
/*
|
||||
// A) Init a configuration object
|
||||
WebPDecoderConfig config;
|
||||
CHECK(WebPInitDecoderConfig(&config));
|
||||
|
||||
// B) optional: retrieve the bitstream's features.
|
||||
CHECK(WebPGetFeatures(data, data_size, &config.input) == VP8_STATUS_OK);
|
||||
|
||||
// C) Adjust 'config', if needed
|
||||
config.no_fancy_upsampling = 1;
|
||||
config.output.colorspace = MODE_BGRA;
|
||||
// etc.
|
||||
|
||||
// Note that you can also make config.output point to an externally
|
||||
// supplied memory buffer, provided it's big enough to store the decoded
|
||||
// picture. Otherwise, config.output will just be used to allocate memory
|
||||
// and store the decoded picture.
|
||||
|
||||
// D) Decode!
|
||||
CHECK(WebPDecode(data, data_size, &config) == VP8_STATUS_OK);
|
||||
|
||||
// E) Decoded image is now in config.output (and config.output.u.RGBA)
|
||||
|
||||
// F) Reclaim memory allocated in config's object. It's safe to call
|
||||
// this function even if the memory is external and wasn't allocated
|
||||
// by WebPDecode().
|
||||
WebPFreeDecBuffer(&config.output);
|
||||
*/
|
||||
|
||||
// Features gathered from the bitstream
|
||||
struct WebPBitstreamFeatures {
|
||||
int width; // Width in pixels, as read from the bitstream.
|
||||
int height; // Height in pixels, as read from the bitstream.
|
||||
int has_alpha; // True if the bitstream contains an alpha channel.
|
||||
int has_animation; // True if the bitstream is an animation.
|
||||
int format; // 0 = undefined (/mixed), 1 = lossy, 2 = lossless
|
||||
|
||||
uint32_t pad[5]; // padding for later use
|
||||
};
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(VP8StatusCode) WebPGetFeaturesInternal(
|
||||
const uint8_t*, size_t, WebPBitstreamFeatures*, int);
|
||||
|
||||
// Retrieve features from the bitstream. The *features structure is filled
|
||||
// with information gathered from the bitstream.
|
||||
// Returns VP8_STATUS_OK when the features are successfully retrieved. Returns
|
||||
// VP8_STATUS_NOT_ENOUGH_DATA when more data is needed to retrieve the
|
||||
// features from headers. Returns error in other cases.
|
||||
static WEBP_INLINE VP8StatusCode WebPGetFeatures(
|
||||
const uint8_t* data, size_t data_size,
|
||||
WebPBitstreamFeatures* features) {
|
||||
return WebPGetFeaturesInternal(data, data_size, features,
|
||||
WEBP_DECODER_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Decoding options
|
||||
struct WebPDecoderOptions {
|
||||
int bypass_filtering; // if true, skip the in-loop filtering
|
||||
int no_fancy_upsampling; // if true, use faster pointwise upsampler
|
||||
int use_cropping; // if true, cropping is applied _first_
|
||||
int crop_left, crop_top; // top-left position for cropping.
|
||||
// Will be snapped to even values.
|
||||
int crop_width, crop_height; // dimension of the cropping area
|
||||
int use_scaling; // if true, scaling is applied _afterward_
|
||||
int scaled_width, scaled_height; // final resolution
|
||||
int use_threads; // if true, use multi-threaded decoding
|
||||
int dithering_strength; // dithering strength (0=Off, 100=full)
|
||||
int flip; // flip output vertically
|
||||
int alpha_dithering_strength; // alpha dithering strength in [0..100]
|
||||
|
||||
uint32_t pad[5]; // padding for later use
|
||||
};
|
||||
|
||||
// Main object storing the configuration for advanced decoding.
|
||||
struct WebPDecoderConfig {
|
||||
WebPBitstreamFeatures input; // Immutable bitstream features (optional)
|
||||
WebPDecBuffer output; // Output buffer (can point to external mem)
|
||||
WebPDecoderOptions options; // Decoding options
|
||||
};
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(int) WebPInitDecoderConfigInternal(WebPDecoderConfig*, int);
|
||||
|
||||
// Initialize the configuration as empty. This function must always be
|
||||
// called first, unless WebPGetFeatures() is to be called.
|
||||
// Returns false in case of mismatched version.
|
||||
static WEBP_INLINE int WebPInitDecoderConfig(WebPDecoderConfig* config) {
|
||||
return WebPInitDecoderConfigInternal(config, WEBP_DECODER_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Instantiate a new incremental decoder object with the requested
|
||||
// configuration. The bitstream can be passed using 'data' and 'data_size'
|
||||
// parameter, in which case the features will be parsed and stored into
|
||||
// config->input. Otherwise, 'data' can be NULL and no parsing will occur.
|
||||
// Note that 'config' can be NULL too, in which case a default configuration
|
||||
// is used.
|
||||
// The return WebPIDecoder object must always be deleted calling WebPIDelete().
|
||||
// Returns NULL in case of error (and config->status will then reflect
|
||||
// the error condition).
|
||||
WEBP_EXTERN(WebPIDecoder*) WebPIDecode(const uint8_t* data, size_t data_size,
|
||||
WebPDecoderConfig* config);
|
||||
|
||||
// Non-incremental version. This version decodes the full data at once, taking
|
||||
// 'config' into account. Returns decoding status (which should be VP8_STATUS_OK
|
||||
// if the decoding was successful).
|
||||
WEBP_EXTERN(VP8StatusCode) WebPDecode(const uint8_t* data, size_t data_size,
|
||||
WebPDecoderConfig* config);
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
||||
|
||||
#endif /* WEBP_WEBP_DECODE_H_ */
|
Example/Pods/YYImage/Vendor/WebP.framework/Headers/demux.h (generated, vendored, Normal file, 358 lines added)
@@ -0,0 +1,358 @@
|
||||
// Copyright 2012 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// Demux API.
|
||||
// Enables extraction of image and extended format data from WebP files.
|
||||
|
||||
// Code Example: Demuxing WebP data to extract all the frames, ICC profile
|
||||
// and EXIF/XMP metadata.
|
||||
/*
|
||||
WebPDemuxer* demux = WebPDemux(&webp_data);
|
||||
|
||||
uint32_t width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH);
|
||||
uint32_t height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT);
|
||||
// ... (Get information about the features present in the WebP file).
|
||||
uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS);
|
||||
|
||||
// ... (Iterate over all frames).
|
||||
WebPIterator iter;
|
||||
if (WebPDemuxGetFrame(demux, 1, &iter)) {
|
||||
do {
|
||||
// ... (Consume 'iter'; e.g. Decode 'iter.fragment' with WebPDecode(),
|
||||
// ... and get other frame properties like width, height, offsets etc.
|
||||
// ... see 'struct WebPIterator' below for more info).
|
||||
} while (WebPDemuxNextFrame(&iter));
|
||||
WebPDemuxReleaseIterator(&iter);
|
||||
}
|
||||
|
||||
// ... (Extract metadata).
|
||||
WebPChunkIterator chunk_iter;
|
||||
if (flags & ICCP_FLAG) WebPDemuxGetChunk(demux, "ICCP", 1, &chunk_iter);
|
||||
// ... (Consume the ICC profile in 'chunk_iter.chunk').
|
||||
WebPDemuxReleaseChunkIterator(&chunk_iter);
|
||||
if (flags & EXIF_FLAG) WebPDemuxGetChunk(demux, "EXIF", 1, &chunk_iter);
|
||||
// ... (Consume the EXIF metadata in 'chunk_iter.chunk').
|
||||
WebPDemuxReleaseChunkIterator(&chunk_iter);
|
||||
if (flags & XMP_FLAG) WebPDemuxGetChunk(demux, "XMP ", 1, &chunk_iter);
|
||||
// ... (Consume the XMP metadata in 'chunk_iter.chunk').
|
||||
WebPDemuxReleaseChunkIterator(&chunk_iter);
|
||||
WebPDemuxDelete(demux);
|
||||
*/
|
||||
|
||||
#ifndef WEBP_WEBP_DEMUX_H_
|
||||
#define WEBP_WEBP_DEMUX_H_
|
||||
|
||||
#include "./decode.h" // for WEBP_CSP_MODE
|
||||
#include "./mux_types.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#define WEBP_DEMUX_ABI_VERSION 0x0107 // MAJOR(8b) + MINOR(8b)
|
||||
|
||||
// Note: forward declaring enumerations is not allowed in (strict) C and C++,
|
||||
// the types are left here for reference.
|
||||
// typedef enum WebPDemuxState WebPDemuxState;
|
||||
// typedef enum WebPFormatFeature WebPFormatFeature;
|
||||
typedef struct WebPDemuxer WebPDemuxer;
|
||||
typedef struct WebPIterator WebPIterator;
|
||||
typedef struct WebPChunkIterator WebPChunkIterator;
|
||||
typedef struct WebPAnimInfo WebPAnimInfo;
|
||||
typedef struct WebPAnimDecoderOptions WebPAnimDecoderOptions;
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// Returns the version number of the demux library, packed in hexadecimal using
|
||||
// 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507.
|
||||
WEBP_EXTERN(int) WebPGetDemuxVersion(void);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Life of a Demux object
|
||||
|
||||
typedef enum WebPDemuxState {
|
||||
WEBP_DEMUX_PARSE_ERROR = -1, // An error occurred while parsing.
|
||||
WEBP_DEMUX_PARSING_HEADER = 0, // Not enough data to parse full header.
|
||||
WEBP_DEMUX_PARSED_HEADER = 1, // Header parsing complete,
|
||||
// data may be available.
|
||||
WEBP_DEMUX_DONE = 2 // Entire file has been parsed.
|
||||
} WebPDemuxState;
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(WebPDemuxer*) WebPDemuxInternal(
|
||||
const WebPData*, int, WebPDemuxState*, int);
|
||||
|
||||
// Parses the full WebP file given by 'data'. For single images the WebP file
|
||||
// header alone or the file header and the chunk header may be absent.
|
||||
// Returns a WebPDemuxer object on successful parse, NULL otherwise.
|
||||
static WEBP_INLINE WebPDemuxer* WebPDemux(const WebPData* data) {
|
||||
return WebPDemuxInternal(data, 0, NULL, WEBP_DEMUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Parses the possibly incomplete WebP file given by 'data'.
|
||||
// If 'state' is non-NULL it will be set to indicate the status of the demuxer.
|
||||
// Returns NULL in case of error or if there isn't enough data to start parsing;
|
||||
// and a WebPDemuxer object on successful parse.
|
||||
// Note that WebPDemuxer keeps internal pointers to 'data' memory segment.
|
||||
// If this data is volatile, the demuxer object should be deleted (by calling
|
||||
// WebPDemuxDelete()) and WebPDemuxPartial() called again on the new data.
|
||||
// This is usually an inexpensive operation.
|
||||
static WEBP_INLINE WebPDemuxer* WebPDemuxPartial(
|
||||
const WebPData* data, WebPDemuxState* state) {
|
||||
return WebPDemuxInternal(data, 1, state, WEBP_DEMUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Frees memory associated with 'dmux'.
|
||||
WEBP_EXTERN(void) WebPDemuxDelete(WebPDemuxer* dmux);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Data/information extraction.
|
||||
|
||||
typedef enum WebPFormatFeature {
|
||||
WEBP_FF_FORMAT_FLAGS, // Extended format flags present in the 'VP8X' chunk.
|
||||
WEBP_FF_CANVAS_WIDTH,
|
||||
WEBP_FF_CANVAS_HEIGHT,
|
||||
WEBP_FF_LOOP_COUNT,
|
||||
WEBP_FF_BACKGROUND_COLOR,
|
||||
WEBP_FF_FRAME_COUNT // Number of frames present in the demux object.
|
||||
// In case of a partial demux, this is the number of
|
||||
// frames seen so far, with the last frame possibly
|
||||
// being partial.
|
||||
} WebPFormatFeature;
|
||||
|
||||
// Get the 'feature' value from the 'dmux'.
|
||||
// NOTE: values are only valid if WebPDemux() was used or WebPDemuxPartial()
|
||||
// returned a state > WEBP_DEMUX_PARSING_HEADER.
|
||||
WEBP_EXTERN(uint32_t) WebPDemuxGetI(
|
||||
const WebPDemuxer* dmux, WebPFormatFeature feature);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Frame iteration.
|
||||
|
||||
struct WebPIterator {
|
||||
int frame_num;
|
||||
int num_frames; // equivalent to WEBP_FF_FRAME_COUNT.
|
||||
int x_offset, y_offset; // offset relative to the canvas.
|
||||
int width, height; // dimensions of this frame.
|
||||
int duration; // display duration in milliseconds.
|
||||
WebPMuxAnimDispose dispose_method; // dispose method for the frame.
|
||||
int complete; // true if 'fragment' contains a full frame. partial images
|
||||
// may still be decoded with the WebP incremental decoder.
|
||||
WebPData fragment; // The frame given by 'frame_num'. Note for historical
|
||||
// reasons this is called a fragment.
|
||||
int has_alpha; // True if the frame contains transparency.
|
||||
WebPMuxAnimBlend blend_method; // Blend operation for the frame.
|
||||
|
||||
uint32_t pad[2]; // padding for later use.
|
||||
void* private_; // for internal use only.
|
||||
};
|
||||
|
||||
// Retrieves frame 'frame_number' from 'dmux'.
|
||||
// 'iter->fragment' points to the frame on return from this function.
|
||||
// Setting 'frame_number' equal to 0 will return the last frame of the image.
|
||||
// Returns false if 'dmux' is NULL or frame 'frame_number' is not present.
|
||||
// Call WebPDemuxReleaseIterator() when use of the iterator is complete.
|
||||
// NOTE: 'dmux' must persist for the lifetime of 'iter'.
|
||||
WEBP_EXTERN(int) WebPDemuxGetFrame(
|
||||
const WebPDemuxer* dmux, int frame_number, WebPIterator* iter);
|
||||
|
||||
// Sets 'iter->fragment' to point to the next ('iter->frame_num' + 1) or
|
||||
// previous ('iter->frame_num' - 1) frame. These functions do not loop.
|
||||
// Returns true on success, false otherwise.
|
||||
WEBP_EXTERN(int) WebPDemuxNextFrame(WebPIterator* iter);
|
||||
WEBP_EXTERN(int) WebPDemuxPrevFrame(WebPIterator* iter);
|
||||
|
||||
// Releases any memory associated with 'iter'.
|
||||
// Must be called before any subsequent calls to WebPDemuxGetChunk() on the same
|
||||
// iter. Also, must be called before destroying the associated WebPDemuxer with
|
||||
// WebPDemuxDelete().
|
||||
WEBP_EXTERN(void) WebPDemuxReleaseIterator(WebPIterator* iter);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Chunk iteration.
|
||||
|
||||
struct WebPChunkIterator {
|
||||
// The current and total number of chunks with the fourcc given to
|
||||
// WebPDemuxGetChunk().
|
||||
int chunk_num;
|
||||
int num_chunks;
|
||||
WebPData chunk; // The payload of the chunk.
|
||||
|
||||
uint32_t pad[6]; // padding for later use
|
||||
void* private_;
|
||||
};
|
||||
|
||||
// Retrieves the 'chunk_number' instance of the chunk with id 'fourcc' from
|
||||
// 'dmux'.
|
||||
// 'fourcc' is a character array containing the fourcc of the chunk to return,
|
||||
// e.g., "ICCP", "XMP ", "EXIF", etc.
|
||||
// Setting 'chunk_number' equal to 0 will return the last chunk in a set.
|
||||
// Returns true if the chunk is found, false otherwise. Image related chunk
|
||||
// payloads are accessed through WebPDemuxGetFrame() and related functions.
|
||||
// Call WebPDemuxReleaseChunkIterator() when use of the iterator is complete.
|
||||
// NOTE: 'dmux' must persist for the lifetime of the iterator.
|
||||
WEBP_EXTERN(int) WebPDemuxGetChunk(const WebPDemuxer* dmux,
|
||||
const char fourcc[4], int chunk_number,
|
||||
WebPChunkIterator* iter);
|
||||
|
||||
// Sets 'iter->chunk' to point to the next ('iter->chunk_num' + 1) or previous
|
||||
// ('iter->chunk_num' - 1) chunk. These functions do not loop.
|
||||
// Returns true on success, false otherwise.
|
||||
WEBP_EXTERN(int) WebPDemuxNextChunk(WebPChunkIterator* iter);
|
||||
WEBP_EXTERN(int) WebPDemuxPrevChunk(WebPChunkIterator* iter);
|
||||
|
||||
// Releases any memory associated with 'iter'.
|
||||
// Must be called before destroying the associated WebPDemuxer with
|
||||
// WebPDemuxDelete().
|
||||
WEBP_EXTERN(void) WebPDemuxReleaseChunkIterator(WebPChunkIterator* iter);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// WebPAnimDecoder API
|
||||
//
|
||||
// This API allows decoding (possibly) animated WebP images.
|
||||
//
|
||||
// Code Example:
|
||||
/*
|
||||
WebPAnimDecoderOptions dec_options;
|
||||
WebPAnimDecoderOptionsInit(&dec_options);
|
||||
// Tune 'dec_options' as needed.
|
||||
WebPAnimDecoder* dec = WebPAnimDecoderNew(webp_data, &dec_options);
|
||||
WebPAnimInfo anim_info;
|
||||
WebPAnimDecoderGetInfo(dec, &anim_info);
|
||||
for (uint32_t i = 0; i < anim_info.loop_count; ++i) {
|
||||
while (WebPAnimDecoderHasMoreFrames(dec)) {
|
||||
uint8_t* buf;
|
||||
int timestamp;
|
||||
WebPAnimDecoderGetNext(dec, &buf, &timestamp);
|
||||
// ... (Render 'buf' based on 'timestamp').
|
||||
// ... (Do NOT free 'buf', as it is owned by 'dec').
|
||||
}
|
||||
WebPAnimDecoderReset(dec);
|
||||
}
|
||||
const WebPDemuxer* demuxer = WebPAnimDecoderGetDemuxer(dec);
|
||||
// ... (Do something using 'demuxer'; e.g. get EXIF/XMP/ICC data).
|
||||
WebPAnimDecoderDelete(dec);
|
||||
*/
|
||||
|
||||
typedef struct WebPAnimDecoder WebPAnimDecoder; // Main opaque object.
|
||||
|
||||
// Global options.
|
||||
struct WebPAnimDecoderOptions {
|
||||
// Output colorspace. Only the following modes are supported:
|
||||
// MODE_RGBA, MODE_BGRA, MODE_rgbA and MODE_bgrA.
|
||||
WEBP_CSP_MODE color_mode;
|
||||
int use_threads; // If true, use multi-threaded decoding.
|
||||
uint32_t padding[7]; // Padding for later use.
|
||||
};
|
||||
|
||||
// Internal, version-checked, entry point.
|
||||
WEBP_EXTERN(int) WebPAnimDecoderOptionsInitInternal(
|
||||
WebPAnimDecoderOptions*, int);
|
||||
|
||||
// Should always be called, to initialize a fresh WebPAnimDecoderOptions
|
||||
// structure before modification. Returns false in case of version mismatch.
|
||||
// WebPAnimDecoderOptionsInit() must have succeeded before using the
|
||||
// 'dec_options' object.
|
||||
static WEBP_INLINE int WebPAnimDecoderOptionsInit(
|
||||
WebPAnimDecoderOptions* dec_options) {
|
||||
return WebPAnimDecoderOptionsInitInternal(dec_options,
|
||||
WEBP_DEMUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Internal, version-checked, entry point.
|
||||
WEBP_EXTERN(WebPAnimDecoder*) WebPAnimDecoderNewInternal(
|
||||
const WebPData*, const WebPAnimDecoderOptions*, int);
|
||||
|
||||
// Creates and initializes a WebPAnimDecoder object.
|
||||
// Parameters:
|
||||
// webp_data - (in) WebP bitstream. This should remain unchanged during the
|
||||
// lifetime of the output WebPAnimDecoder object.
|
||||
// dec_options - (in) decoding options. Can be passed NULL to choose
|
||||
// reasonable defaults (in particular, color mode MODE_RGBA
|
||||
// will be picked).
|
||||
// Returns:
|
||||
// A pointer to the newly created WebPAnimDecoder object, or NULL in case of
|
||||
// parsing error, invalid option or memory error.
|
||||
static WEBP_INLINE WebPAnimDecoder* WebPAnimDecoderNew(
|
||||
const WebPData* webp_data, const WebPAnimDecoderOptions* dec_options) {
|
||||
return WebPAnimDecoderNewInternal(webp_data, dec_options,
|
||||
WEBP_DEMUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Global information about the animation.
|
||||
struct WebPAnimInfo {
|
||||
uint32_t canvas_width;
|
||||
uint32_t canvas_height;
|
||||
uint32_t loop_count;
|
||||
uint32_t bgcolor;
|
||||
uint32_t frame_count;
|
||||
uint32_t pad[4]; // padding for later use
|
||||
};
|
||||
|
||||
// Get global information about the animation.
|
||||
// Parameters:
|
||||
// dec - (in) decoder instance to get information from.
|
||||
// info - (out) global information fetched from the animation.
|
||||
// Returns:
|
||||
// True on success.
|
||||
WEBP_EXTERN(int) WebPAnimDecoderGetInfo(const WebPAnimDecoder* dec,
|
||||
WebPAnimInfo* info);
|
||||
|
||||
// Fetch the next frame from 'dec' based on options supplied to
|
||||
// WebPAnimDecoderNew(). This will be a fully reconstructed canvas of size
|
||||
// 'canvas_width * 4 * canvas_height', and not just the frame sub-rectangle. The
|
||||
// returned buffer 'buf' is valid only until the next call to
|
||||
// WebPAnimDecoderGetNext(), WebPAnimDecoderReset() or WebPAnimDecoderDelete().
|
||||
// Parameters:
|
||||
// dec - (in/out) decoder instance from which the next frame is to be fetched.
|
||||
// buf - (out) decoded frame.
|
||||
// timestamp - (out) timestamp of the frame in milliseconds.
|
||||
// Returns:
|
||||
// False if any of the arguments are NULL, or if there is a parsing or
|
||||
// decoding error, or if there are no more frames. Otherwise, returns true.
|
||||
WEBP_EXTERN(int) WebPAnimDecoderGetNext(WebPAnimDecoder* dec,
|
||||
uint8_t** buf, int* timestamp);
|
||||
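// A sketch of how the returned canvas can be addressed, assuming MODE_RGBA
// output and an 'anim_info' filled by WebPAnimDecoderGetInfo() above:
/*
  uint8_t* buf;
  int timestamp;
  if (WebPAnimDecoderGetNext(dec, &buf, &timestamp)) {
    const int stride = anim_info.canvas_width * 4;    // 4 bytes per RGBA pixel
    const uint8_t* pixel = buf + y * stride + x * 4;  // R,G,B,A bytes at (x, y)
    // ... (read pixel[0..3]; do NOT free 'buf').
  }
*/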
|
||||
// Check if there are more frames left to decode.
|
||||
// Parameters:
|
||||
// dec - (in) decoder instance to be checked.
|
||||
// Returns:
|
||||
// True if 'dec' is not NULL and some frames are yet to be decoded.
|
||||
// Otherwise, returns false.
|
||||
WEBP_EXTERN(int) WebPAnimDecoderHasMoreFrames(const WebPAnimDecoder* dec);
|
||||
|
||||
// Resets the WebPAnimDecoder object, so that next call to
|
||||
// WebPAnimDecoderGetNext() will restart decoding from the first frame. This is
|
||||
// helpful when all frames need to be decoded multiple times (e.g.
|
||||
// info.loop_count times) without destroying and recreating the 'dec' object.
|
||||
// Parameters:
|
||||
// dec - (in/out) decoder instance to be reset
|
||||
WEBP_EXTERN(void) WebPAnimDecoderReset(WebPAnimDecoder* dec);
|
||||
|
||||
// Grab the internal demuxer object.
|
||||
// Getting the demuxer object can be useful if one wants to use operations only
|
||||
// available through demuxer; e.g. to get XMP/EXIF/ICC metadata. The returned
|
||||
// demuxer object is owned by 'dec' and is valid only until the next call to
|
||||
// WebPAnimDecoderDelete().
|
||||
//
|
||||
// Parameters:
|
||||
// dec - (in) decoder instance from which the demuxer object is to be fetched.
|
||||
WEBP_EXTERN(const WebPDemuxer*) WebPAnimDecoderGetDemuxer(
|
||||
const WebPAnimDecoder* dec);
|
||||
|
||||
// Deletes the WebPAnimDecoder object.
|
||||
// Parameters:
|
||||
// dec - (in/out) decoder instance to be deleted
|
||||
WEBP_EXTERN(void) WebPAnimDecoderDelete(WebPAnimDecoder* dec);
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
||||
|
||||
#endif /* WEBP_WEBP_DEMUX_H_ */
|
527
Example/Pods/YYImage/Vendor/WebP.framework/Headers/encode.h
generated
vendored
Normal file
@@ -0,0 +1,527 @@
|
||||
// Copyright 2011 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// WebP encoder: main interface
|
||||
//
|
||||
// Author: Skal (pascal.massimino@gmail.com)
|
||||
|
||||
#ifndef WEBP_WEBP_ENCODE_H_
|
||||
#define WEBP_WEBP_ENCODE_H_
|
||||
|
||||
#include "./types.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#define WEBP_ENCODER_ABI_VERSION 0x0209 // MAJOR(8b) + MINOR(8b)
|
||||
|
||||
// Note: forward declaring enumerations is not allowed in (strict) C and C++,
|
||||
// the types are left here for reference.
|
||||
// typedef enum WebPImageHint WebPImageHint;
|
||||
// typedef enum WebPEncCSP WebPEncCSP;
|
||||
// typedef enum WebPPreset WebPPreset;
|
||||
// typedef enum WebPEncodingError WebPEncodingError;
|
||||
typedef struct WebPConfig WebPConfig;
|
||||
typedef struct WebPPicture WebPPicture; // main structure for I/O
|
||||
typedef struct WebPAuxStats WebPAuxStats;
|
||||
typedef struct WebPMemoryWriter WebPMemoryWriter;
|
||||
|
||||
// Return the encoder's version number, packed in hexadecimal using 8bits for
|
||||
// each of major/minor/revision. E.g: v2.5.7 is 0x020507.
|
||||
WEBP_EXTERN(int) WebPGetEncoderVersion(void);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// One-stop-shop call! No questions asked:
|
||||
|
||||
// Returns the size of the compressed data (pointed to by *output), or 0 if
|
||||
// an error occurred. The compressed data must be released by the caller
|
||||
// using the call 'WebPFree(*output)'.
|
||||
// These functions compress using the lossy format, and the quality_factor
|
||||
// can go from 0 (smaller output, lower quality) to 100 (best quality,
|
||||
// larger output).
|
||||
WEBP_EXTERN(size_t) WebPEncodeRGB(const uint8_t* rgb,
|
||||
int width, int height, int stride,
|
||||
float quality_factor, uint8_t** output);
|
||||
WEBP_EXTERN(size_t) WebPEncodeBGR(const uint8_t* bgr,
|
||||
int width, int height, int stride,
|
||||
float quality_factor, uint8_t** output);
|
||||
WEBP_EXTERN(size_t) WebPEncodeRGBA(const uint8_t* rgba,
|
||||
int width, int height, int stride,
|
||||
float quality_factor, uint8_t** output);
|
||||
WEBP_EXTERN(size_t) WebPEncodeBGRA(const uint8_t* bgra,
|
||||
int width, int height, int stride,
|
||||
float quality_factor, uint8_t** output);
|
||||
|
||||
// These functions are the equivalent of the above, but compressing in a
|
||||
// lossless manner. Files are usually larger than with the lossy format, but will
|
||||
// not suffer any compression loss.
|
||||
WEBP_EXTERN(size_t) WebPEncodeLosslessRGB(const uint8_t* rgb,
|
||||
int width, int height, int stride,
|
||||
uint8_t** output);
|
||||
WEBP_EXTERN(size_t) WebPEncodeLosslessBGR(const uint8_t* bgr,
|
||||
int width, int height, int stride,
|
||||
uint8_t** output);
|
||||
WEBP_EXTERN(size_t) WebPEncodeLosslessRGBA(const uint8_t* rgba,
|
||||
int width, int height, int stride,
|
||||
uint8_t** output);
|
||||
WEBP_EXTERN(size_t) WebPEncodeLosslessBGRA(const uint8_t* bgra,
|
||||
int width, int height, int stride,
|
||||
uint8_t** output);
|
||||
|
||||
// Releases memory returned by the WebPEncode*() functions above.
|
||||
WEBP_EXTERN(void) WebPFree(void* ptr);
|
||||
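// A minimal sketch of the one-stop calls above, assuming an RGBA buffer 'rgba'
// of width * height * 4 bytes; error handling omitted:
/*
  uint8_t* output = NULL;
  size_t size = WebPEncodeRGBA(rgba, width, height, width * 4,
                               75.f, &output);  // quality_factor = 75
  if (size > 0) {
    // ... (write 'size' bytes from 'output' to a file).
  }
  WebPFree(output);                             // always release the buffer
*/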
|
||||
//------------------------------------------------------------------------------
|
||||
// Coding parameters
|
||||
|
||||
// Image characteristics hint for the underlying encoder.
|
||||
typedef enum WebPImageHint {
|
||||
WEBP_HINT_DEFAULT = 0, // default preset.
|
||||
WEBP_HINT_PICTURE, // digital picture, like portrait, inner shot
|
||||
WEBP_HINT_PHOTO, // outdoor photograph, with natural lighting
|
||||
WEBP_HINT_GRAPH, // Discrete tone image (graph, map-tile etc).
|
||||
WEBP_HINT_LAST
|
||||
} WebPImageHint;
|
||||
|
||||
// Compression parameters.
|
||||
struct WebPConfig {
|
||||
int lossless; // Lossless encoding (0=lossy(default), 1=lossless).
|
||||
float quality; // between 0 (smallest file) and 100 (biggest)
|
||||
int method; // quality/speed trade-off (0=fast, 6=slower-better)
|
||||
|
||||
WebPImageHint image_hint; // Hint for image type (lossless only for now).
|
||||
|
||||
// Parameters related to lossy compression only:
|
||||
int target_size; // if non-zero, set the desired target size in bytes.
|
||||
// Takes precedence over the 'compression' parameter.
|
||||
float target_PSNR; // if non-zero, specifies the minimal distortion to
|
||||
// try to achieve. Takes precedence over target_size.
|
||||
int segments; // maximum number of segments to use, in [1..4]
|
||||
int sns_strength; // Spatial Noise Shaping. 0=off, 100=maximum.
|
||||
int filter_strength; // range: [0 = off .. 100 = strongest]
|
||||
int filter_sharpness; // range: [0 = off .. 7 = least sharp]
|
||||
int filter_type; // filtering type: 0 = simple, 1 = strong (only used
|
||||
// if filter_strength > 0 or autofilter > 0)
|
||||
int autofilter; // Auto adjust filter's strength [0 = off, 1 = on]
|
||||
int alpha_compression; // Algorithm for encoding the alpha plane (0 = none,
|
||||
// 1 = compressed with WebP lossless). Default is 1.
|
||||
int alpha_filtering; // Predictive filtering method for alpha plane.
|
||||
// 0: none, 1: fast, 2: best. Default is 1.
|
||||
int alpha_quality; // Between 0 (smallest size) and 100 (lossless).
|
||||
// Default is 100.
|
||||
int pass; // number of entropy-analysis passes (in [1..10]).
|
||||
|
||||
int show_compressed; // if true, export the compressed picture back.
|
||||
// In-loop filtering is not applied.
|
||||
int preprocessing; // preprocessing filter:
|
||||
// 0=none, 1=segment-smooth, 2=pseudo-random dithering
|
||||
int partitions; // log2(number of token partitions) in [0..3]. Default
|
||||
// is set to 0 for easier progressive decoding.
|
||||
int partition_limit; // quality degradation allowed to fit the 512k limit
|
||||
// on prediction modes coding (0: no degradation,
|
||||
// 100: maximum possible degradation).
|
||||
int emulate_jpeg_size; // If true, compression parameters will be remapped
|
||||
// to better match the expected output size from
|
||||
// JPEG compression. Generally, the output size will
|
||||
// be similar but the degradation will be lower.
|
||||
int thread_level; // If non-zero, try and use multi-threaded encoding.
|
||||
int low_memory; // If set, reduce memory usage (but increase CPU use).
|
||||
|
||||
int near_lossless; // Near lossless encoding [0 = off(default) .. 100].
|
||||
// This feature is experimental.
|
||||
int exact; // if non-zero, preserve the exact RGB values under
|
||||
// transparent area. Otherwise, discard this invisible
|
||||
// RGB information for better compression. The default
|
||||
// value is 0.
|
||||
|
||||
#ifdef WEBP_EXPERIMENTAL_FEATURES
|
||||
int delta_palettization;
|
||||
uint32_t pad[2]; // padding for later use
|
||||
#else
|
||||
uint32_t pad[3]; // padding for later use
|
||||
#endif // WEBP_EXPERIMENTAL_FEATURES
|
||||
};
|
||||
|
||||
// Enumerate some predefined settings for WebPConfig, depending on the type
|
||||
// of source picture. These presets are used when calling WebPConfigPreset().
|
||||
typedef enum WebPPreset {
|
||||
WEBP_PRESET_DEFAULT = 0, // default preset.
|
||||
WEBP_PRESET_PICTURE, // digital picture, like portrait, inner shot
|
||||
WEBP_PRESET_PHOTO, // outdoor photograph, with natural lighting
|
||||
WEBP_PRESET_DRAWING, // hand or line drawing, with high-contrast details
|
||||
WEBP_PRESET_ICON, // small-sized colorful images
|
||||
WEBP_PRESET_TEXT // text-like
|
||||
} WebPPreset;
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(int) WebPConfigInitInternal(WebPConfig*, WebPPreset, float, int);
|
||||
|
||||
// Should always be called, to initialize a fresh WebPConfig structure before
|
||||
// modification. Returns false in case of version mismatch. WebPConfigInit()
|
||||
// must have succeeded before using the 'config' object.
|
||||
// Note that the default values are lossless=0 and quality=75.
|
||||
static WEBP_INLINE int WebPConfigInit(WebPConfig* config) {
|
||||
return WebPConfigInitInternal(config, WEBP_PRESET_DEFAULT, 75.f,
|
||||
WEBP_ENCODER_ABI_VERSION);
|
||||
}
|
||||
|
||||
// This function will initialize the configuration according to a predefined
|
||||
// set of parameters (referred to by 'preset') and a given quality factor.
|
||||
// This function can be called as a replacement to WebPConfigInit(). Will
|
||||
// return false in case of error.
|
||||
static WEBP_INLINE int WebPConfigPreset(WebPConfig* config,
|
||||
WebPPreset preset, float quality) {
|
||||
return WebPConfigInitInternal(config, preset, quality,
|
||||
WEBP_ENCODER_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Activate the lossless compression mode with the desired efficiency level
|
||||
// between 0 (fastest, lowest compression) and 9 (slower, best compression).
|
||||
// A good default level is '6', providing a fair tradeoff between compression
|
||||
// speed and final compressed size.
|
||||
// This function will overwrite several fields from config: 'method', 'quality'
|
||||
// and 'lossless'. Returns false in case of parameter error.
|
||||
WEBP_EXTERN(int) WebPConfigLosslessPreset(WebPConfig* config, int level);
|
||||
|
||||
// Returns true if 'config' is non-NULL and all configuration parameters are
|
||||
// within their valid ranges.
|
||||
WEBP_EXTERN(int) WebPValidateConfig(const WebPConfig* config);
|
||||
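// A typical configuration sketch using the helpers above; the chosen values
// are illustrative only:
/*
  WebPConfig config;
  if (!WebPConfigPreset(&config, WEBP_PRESET_PHOTO, 80.f)) {
    // ... (version mismatch).
  }
  config.method = 6;         // slower, better compression
  config.thread_level = 1;   // allow multi-threaded encoding
  if (!WebPValidateConfig(&config)) {
    // ... (a field was set out of its valid range).
  }
*/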
|
||||
//------------------------------------------------------------------------------
|
||||
// Input / Output
|
||||
// Structure for storing auxiliary statistics (mostly for lossy encoding).
|
||||
|
||||
struct WebPAuxStats {
|
||||
int coded_size; // final size
|
||||
|
||||
float PSNR[5]; // peak-signal-to-noise ratio for Y/U/V/All/Alpha
|
||||
int block_count[3]; // number of intra4/intra16/skipped macroblocks
|
||||
int header_bytes[2]; // approximate number of bytes spent for header
|
||||
// and mode-partition #0
|
||||
int residual_bytes[3][4]; // approximate number of bytes spent for
|
||||
// DC/AC/uv coefficients for each (0..3) segments.
|
||||
int segment_size[4]; // number of macroblocks in each segment
|
||||
int segment_quant[4]; // quantizer values for each segment
|
||||
int segment_level[4]; // filtering strength for each segment [0..63]
|
||||
|
||||
int alpha_data_size; // size of the transparency data
|
||||
int layer_data_size; // size of the enhancement layer data
|
||||
|
||||
// lossless encoder statistics
|
||||
uint32_t lossless_features; // bit0:predictor bit1:cross-color transform
|
||||
// bit2:subtract-green bit3:color indexing
|
||||
int histogram_bits; // number of precision bits of histogram
|
||||
int transform_bits; // precision bits for transform
|
||||
int cache_bits; // number of bits for color cache lookup
|
||||
int palette_size; // number of colors in palette, if used
|
||||
int lossless_size; // final lossless size
|
||||
int lossless_hdr_size; // lossless header (transform, huffman etc) size
|
||||
int lossless_data_size; // lossless image data size
|
||||
|
||||
uint32_t pad[2]; // padding for later use
|
||||
};
|
||||
|
||||
// Signature for output function. Should return true if writing was successful.
|
||||
// data/data_size is the segment of data to write, and 'picture' is for
|
||||
// reference (and so one can make use of picture->custom_ptr).
|
||||
typedef int (*WebPWriterFunction)(const uint8_t* data, size_t data_size,
|
||||
const WebPPicture* picture);
|
||||
|
||||
// WebPMemoryWrite: a special WebPWriterFunction that writes to memory using
|
||||
// the following WebPMemoryWriter object (to be set as a custom_ptr).
|
||||
struct WebPMemoryWriter {
|
||||
uint8_t* mem; // final buffer (of size 'max_size', larger than 'size').
|
||||
size_t size; // final size
|
||||
size_t max_size; // total capacity
|
||||
uint32_t pad[1]; // padding for later use
|
||||
};
|
||||
|
||||
// The following must be called first before any use.
|
||||
WEBP_EXTERN(void) WebPMemoryWriterInit(WebPMemoryWriter* writer);
|
||||
|
||||
// The following must be called to deallocate writer->mem memory. The 'writer'
|
||||
// object itself is not deallocated.
|
||||
WEBP_EXTERN(void) WebPMemoryWriterClear(WebPMemoryWriter* writer);
|
||||
// The custom writer to be used with WebPMemoryWriter as custom_ptr. Upon
|
||||
// completion, writer.mem and writer.size will hold the coded data.
|
||||
// writer.mem must be freed by calling WebPMemoryWriterClear.
|
||||
WEBP_EXTERN(int) WebPMemoryWrite(const uint8_t* data, size_t data_size,
|
||||
const WebPPicture* picture);
|
||||
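// A sketch of how the memory writer is typically wired into a WebPPicture
// (the 'writer' and 'custom_ptr' fields are declared further below):
/*
  WebPMemoryWriter writer;
  WebPMemoryWriterInit(&writer);
  picture.writer = WebPMemoryWrite;
  picture.custom_ptr = &writer;
  // ... (call WebPEncode(&config, &picture)).
  // On success, writer.mem / writer.size hold the coded bytes.
  WebPMemoryWriterClear(&writer);
*/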
|
||||
// Progress hook, called from time to time to report progress. It can return
|
||||
// false to request an abort of the encoding process, or true otherwise if
|
||||
// everything is OK.
|
||||
typedef int (*WebPProgressHook)(int percent, const WebPPicture* picture);
|
||||
|
||||
// Color spaces.
|
||||
typedef enum WebPEncCSP {
|
||||
// chroma sampling
|
||||
WEBP_YUV420 = 0, // 4:2:0
|
||||
WEBP_YUV420A = 4, // alpha channel variant
|
||||
WEBP_CSP_UV_MASK = 3, // bit-mask to get the UV sampling factors
|
||||
WEBP_CSP_ALPHA_BIT = 4 // bit that is set if alpha is present
|
||||
} WebPEncCSP;
|
||||
|
||||
// Encoding error conditions.
|
||||
typedef enum WebPEncodingError {
|
||||
VP8_ENC_OK = 0,
|
||||
VP8_ENC_ERROR_OUT_OF_MEMORY, // memory error allocating objects
|
||||
VP8_ENC_ERROR_BITSTREAM_OUT_OF_MEMORY, // memory error while flushing bits
|
||||
VP8_ENC_ERROR_NULL_PARAMETER, // a pointer parameter is NULL
|
||||
VP8_ENC_ERROR_INVALID_CONFIGURATION, // configuration is invalid
|
||||
VP8_ENC_ERROR_BAD_DIMENSION, // picture has invalid width/height
|
||||
VP8_ENC_ERROR_PARTITION0_OVERFLOW, // partition is bigger than 512k
|
||||
VP8_ENC_ERROR_PARTITION_OVERFLOW, // partition is bigger than 16M
|
||||
VP8_ENC_ERROR_BAD_WRITE, // error while flushing bytes
|
||||
VP8_ENC_ERROR_FILE_TOO_BIG, // file is bigger than 4G
|
||||
VP8_ENC_ERROR_USER_ABORT, // abort request by user
|
||||
VP8_ENC_ERROR_LAST // list terminator. always last.
|
||||
} WebPEncodingError;
|
||||
|
||||
// maximum width/height allowed (inclusive), in pixels
|
||||
#define WEBP_MAX_DIMENSION 16383
|
||||
|
||||
// Main exchange structure (input samples, output bytes, statistics)
|
||||
struct WebPPicture {
|
||||
// INPUT
|
||||
//////////////
|
||||
// Main flag for encoder selecting between ARGB or YUV input.
|
||||
// It is recommended to use ARGB input (*argb, argb_stride) for lossless
|
||||
// compression, and YUV input (*y, *u, *v, etc.) for lossy compression
|
||||
// since these are the respective native colorspace for these formats.
|
||||
int use_argb;
|
||||
|
||||
// YUV input (mostly used for input to lossy compression)
|
||||
WebPEncCSP colorspace; // colorspace: should be YUV420 for now (=Y'CbCr).
|
||||
int width, height; // dimensions (less or equal to WEBP_MAX_DIMENSION)
|
||||
uint8_t *y, *u, *v; // pointers to luma/chroma planes.
|
||||
int y_stride, uv_stride; // luma/chroma strides.
|
||||
uint8_t* a; // pointer to the alpha plane
|
||||
int a_stride; // stride of the alpha plane
|
||||
uint32_t pad1[2]; // padding for later use
|
||||
|
||||
// ARGB input (mostly used for input to lossless compression)
|
||||
uint32_t* argb; // Pointer to argb (32 bit) plane.
|
||||
int argb_stride; // This is the stride in pixel units, not bytes.
|
||||
uint32_t pad2[3]; // padding for later use
|
||||
|
||||
// OUTPUT
|
||||
///////////////
|
||||
// Byte-emission hook, to store compressed bytes as they are ready.
|
||||
WebPWriterFunction writer; // can be NULL
|
||||
void* custom_ptr; // can be used by the writer.
|
||||
|
||||
// map for extra information (only for lossy compression mode)
|
||||
int extra_info_type; // 1: intra type, 2: segment, 3: quant
|
||||
// 4: intra-16 prediction mode,
|
||||
// 5: chroma prediction mode,
|
||||
// 6: bit cost, 7: distortion
|
||||
uint8_t* extra_info; // if not NULL, points to an array of size
|
||||
// ((width + 15) / 16) * ((height + 15) / 16) that
|
||||
// will be filled with a macroblock map, depending
|
||||
// on extra_info_type.
|
||||
|
||||
// STATS AND REPORTS
|
||||
///////////////////////////
|
||||
// Pointer to side statistics (updated only if not NULL)
|
||||
WebPAuxStats* stats;
|
||||
|
||||
// Error code for the latest error encountered during encoding
|
||||
WebPEncodingError error_code;
|
||||
|
||||
// If not NULL, report progress during encoding.
|
||||
WebPProgressHook progress_hook;
|
||||
|
||||
void* user_data; // this field is free to be set to any value and
|
||||
// used during callbacks (like progress-report e.g.).
|
||||
|
||||
uint32_t pad3[3]; // padding for later use
|
||||
|
||||
// Unused for now
|
||||
uint8_t *pad4, *pad5;
|
||||
uint32_t pad6[8]; // padding for later use
|
||||
|
||||
// PRIVATE FIELDS
|
||||
////////////////////
|
||||
void* memory_; // row chunk of memory for yuva planes
|
||||
void* memory_argb_; // and for argb too.
|
||||
void* pad7[2]; // padding for later use
|
||||
};
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(int) WebPPictureInitInternal(WebPPicture*, int);
|
||||
|
||||
// Should always be called, to initialize the structure. Returns false in case
|
||||
// of version mismatch. WebPPictureInit() must have succeeded before using the
|
||||
// 'picture' object.
|
||||
// Note that, by default, use_argb is false and colorspace is WEBP_YUV420.
|
||||
static WEBP_INLINE int WebPPictureInit(WebPPicture* picture) {
|
||||
return WebPPictureInitInternal(picture, WEBP_ENCODER_ABI_VERSION);
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// WebPPicture utils
|
||||
|
||||
// Convenience allocation / deallocation based on picture->width/height:
|
||||
// Allocate y/u/v buffers as per colorspace/width/height specification.
|
||||
// Note! This function will free the previous buffer if needed.
|
||||
// Returns false in case of memory error.
|
||||
WEBP_EXTERN(int) WebPPictureAlloc(WebPPicture* picture);
|
||||
|
||||
// Release the memory allocated by WebPPictureAlloc() or WebPPictureImport*().
|
||||
// Note that this function does _not_ free the memory used by the 'picture'
|
||||
// object itself.
|
||||
// Besides memory (which is reclaimed) all other fields of 'picture' are
|
||||
// preserved.
|
||||
WEBP_EXTERN(void) WebPPictureFree(WebPPicture* picture);
|
||||
|
||||
// Copy the pixels of *src into *dst, using WebPPictureAlloc. Upon return, *dst
|
||||
// will fully own the copied pixels (this is not a view). The 'dst' picture need
|
||||
// not be initialized as its content is overwritten.
|
||||
// Returns false in case of memory allocation error.
|
||||
WEBP_EXTERN(int) WebPPictureCopy(const WebPPicture* src, WebPPicture* dst);
|
||||
|
||||
// Compute PSNR, SSIM or LSIM distortion metric between two pictures. Results
|
||||
// are in dB, stored in result[] in the Y/U/V/Alpha/All or B/G/R/A/All order.
|
||||
// Returns false in case of error (src and ref don't have same dimension, ...)
|
||||
// Warning: this function is rather CPU-intensive.
|
||||
WEBP_EXTERN(int) WebPPictureDistortion(
|
||||
const WebPPicture* src, const WebPPicture* ref,
|
||||
int metric_type, // 0 = PSNR, 1 = SSIM, 2 = LSIM
|
||||
float result[5]);
|
||||
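// For example, to compute PSNR between two already-filled pictures 'src' and
// 'ref' (hypothetical names):
/*
  float result[5];
  if (WebPPictureDistortion(&src, &ref, 0, result)) {  // metric_type 0 = PSNR
    // result[4] holds the overall value, result[0..3] the per-plane values.
  }
*/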
|
||||
// Self-crops a picture to the rectangle defined by top/left/width/height.
|
||||
// Returns false in case of memory allocation error, or if the rectangle is
|
||||
// outside of the source picture.
|
||||
// The rectangle for the view is defined by the top-left corner pixel
|
||||
// coordinates (left, top) as well as its width and height. This rectangle
|
||||
// must be fully contained inside the 'src' source picture. If the source
|
||||
// picture uses the YUV420 colorspace, the top and left coordinates will be
|
||||
// snapped to even values.
|
||||
WEBP_EXTERN(int) WebPPictureCrop(WebPPicture* picture,
|
||||
int left, int top, int width, int height);
|
||||
|
||||
// Extracts a view from 'src' picture into 'dst'. The rectangle for the view
|
||||
// is defined by the top-left corner pixel coordinates (left, top) as well
|
||||
// as its width and height. This rectangle must be fully contained inside
|
||||
// the 'src' source picture. If the source picture uses the YUV420 colorspace,
|
||||
// the top and left coordinates will be snapped to even values.
|
||||
// Picture 'src' must out-live 'dst' picture. Self-extraction of view is allowed
|
||||
// ('src' equal to 'dst') as a means of fast cropping (but note that doing so,
|
||||
// the original dimension will be lost). Picture 'dst' need not be initialized
|
||||
// with WebPPictureInit() if it is different from 'src', since its content will
|
||||
// be overwritten.
|
||||
// Returns false in case of memory allocation error or invalid parameters.
|
||||
WEBP_EXTERN(int) WebPPictureView(const WebPPicture* src,
|
||||
int left, int top, int width, int height,
|
||||
WebPPicture* dst);
|
||||
|
||||
// Returns true if the 'picture' is actually a view and therefore does
|
||||
// not own the memory for pixels.
|
||||
WEBP_EXTERN(int) WebPPictureIsView(const WebPPicture* picture);
|
||||
|
||||
// Rescale a picture to new dimension width x height.
|
||||
// If either 'width' or 'height' (but not both) is 0 the corresponding
|
||||
// dimension will be calculated preserving the aspect ratio.
|
||||
// No gamma correction is applied.
|
||||
// Returns false in case of error (invalid parameter or insufficient memory).
|
||||
WEBP_EXTERN(int) WebPPictureRescale(WebPPicture* pic, int width, int height);
|
||||
|
||||
// Colorspace conversion function to import RGB samples.
|
||||
// Previous buffer will be free'd, if any.
|
||||
// *rgb buffer should have a size of at least height * rgb_stride.
|
||||
// Returns false in case of memory error.
|
||||
WEBP_EXTERN(int) WebPPictureImportRGB(
|
||||
WebPPicture* picture, const uint8_t* rgb, int rgb_stride);
|
||||
// Same, but for RGBA buffer.
|
||||
WEBP_EXTERN(int) WebPPictureImportRGBA(
|
||||
WebPPicture* picture, const uint8_t* rgba, int rgba_stride);
|
||||
// Same, but for RGBA buffer. Imports the RGB direct from the 32-bit format
|
||||
// input buffer ignoring the alpha channel. Avoids needing to copy the data
|
||||
// to a temporary 24-bit RGB buffer to import the RGB only.
|
||||
WEBP_EXTERN(int) WebPPictureImportRGBX(
|
||||
WebPPicture* picture, const uint8_t* rgbx, int rgbx_stride);
|
||||
|
||||
// Variants of the above, but taking BGR(A|X) input.
|
||||
WEBP_EXTERN(int) WebPPictureImportBGR(
|
||||
WebPPicture* picture, const uint8_t* bgr, int bgr_stride);
|
||||
WEBP_EXTERN(int) WebPPictureImportBGRA(
|
||||
WebPPicture* picture, const uint8_t* bgra, int bgra_stride);
|
||||
WEBP_EXTERN(int) WebPPictureImportBGRX(
|
||||
WebPPicture* picture, const uint8_t* bgrx, int bgrx_stride);
|
||||
|
||||
// Converts picture->argb data to the YUV420A format. The 'colorspace'
|
||||
// parameter is deprecated and should be equal to WEBP_YUV420.
|
||||
// Upon return, picture->use_argb is set to false. The presence of real
|
||||
// non-opaque transparent values is detected, and 'colorspace' will be
|
||||
// adjusted accordingly. Note that this method is lossy.
|
||||
// Returns false in case of error.
|
||||
WEBP_EXTERN(int) WebPPictureARGBToYUVA(WebPPicture* picture,
|
||||
WebPEncCSP /*colorspace = WEBP_YUV420*/);
|
||||
|
||||
// Same as WebPPictureARGBToYUVA(), but the conversion is done using
|
||||
// pseudo-random dithering with a strength 'dithering' between
|
||||
// 0.0 (no dithering) and 1.0 (maximum dithering). This is useful
|
||||
// for photographic pictures.
|
||||
WEBP_EXTERN(int) WebPPictureARGBToYUVADithered(
|
||||
WebPPicture* picture, WebPEncCSP colorspace, float dithering);
|
||||
|
||||
// Performs 'smart' RGBA->YUVA420 downsampling and colorspace conversion.
|
||||
// Downsampling is handled with extra care in case of color clipping. This
|
||||
// method is roughly 2x slower than WebPPictureARGBToYUVA() but produces better
|
||||
// YUV representation.
|
||||
// Returns false in case of error.
|
||||
WEBP_EXTERN(int) WebPPictureSmartARGBToYUVA(WebPPicture* picture);
|
||||
|
||||
// Converts picture->yuv to picture->argb and sets picture->use_argb to true.
|
||||
// The input format must be YUV_420 or YUV_420A.
|
||||
// Note that the use of this method is discouraged if one has access to the
|
||||
// raw ARGB samples, since using YUV420 is comparatively lossy. Also, the
|
||||
// conversion from YUV420 to ARGB incurs a small loss too.
|
||||
// Returns false in case of error.
|
||||
WEBP_EXTERN(int) WebPPictureYUVAToARGB(WebPPicture* picture);
|
||||
|
||||
// Helper function: given a width x height plane of RGBA or YUV(A) samples
|
||||
// clean-up the YUV or RGB samples under fully transparent area, to help
|
||||
// compressibility (no guarantee, though).
|
||||
WEBP_EXTERN(void) WebPCleanupTransparentArea(WebPPicture* picture);
|
||||
|
||||
// Scan the picture 'picture' for the presence of non fully opaque alpha values.
|
||||
// Returns true in such case. Otherwise returns false (indicating that the
|
||||
// alpha plane can be ignored altogether, for example).
|
||||
WEBP_EXTERN(int) WebPPictureHasTransparency(const WebPPicture* picture);
|
||||
|
||||
// Remove the transparency information (if present) by blending the color with
|
||||
// the background color 'background_rgb' (specified as 24bit RGB triplet).
|
||||
// After this call, all alpha values are reset to 0xff.
|
||||
WEBP_EXTERN(void) WebPBlendAlpha(WebPPicture* pic, uint32_t background_rgb);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Main call
|
||||
|
||||
// Main encoding call, after config and picture have been initialized.
|
||||
// 'picture' must be less than 16384x16384 in dimension (cf WEBP_MAX_DIMENSION),
|
||||
// and the 'config' object must be a valid one.
|
||||
// Returns false in case of error, true otherwise.
|
||||
// In case of error, picture->error_code is updated accordingly.
|
||||
// 'picture' can hold the source samples in both YUV(A) or ARGB input, depending
|
||||
// on the value of 'picture->use_argb'. It is highly recommended to use
|
||||
// the former for lossy encoding, and the latter for lossless encoding
|
||||
// (when config.lossless is true). Automatic conversion from one format to
|
||||
// another is provided but they both incur some loss.
|
||||
WEBP_EXTERN(int) WebPEncode(const WebPConfig* config, WebPPicture* picture);
|
||||
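// A minimal lossy-encoding sketch built only from calls declared in this
// header, assuming an RGBA input buffer 'rgba'; error handling omitted:
/*
  WebPConfig config;
  WebPPicture picture;
  WebPMemoryWriter writer;
  WebPConfigPreset(&config, WEBP_PRESET_PHOTO, 80.f);
  WebPPictureInit(&picture);
  picture.width = width;
  picture.height = height;
  WebPPictureImportRGBA(&picture, rgba, width * 4);
  WebPMemoryWriterInit(&writer);
  picture.writer = WebPMemoryWrite;
  picture.custom_ptr = &writer;
  WebPEncode(&config, &picture);
  // ... (use writer.mem / writer.size, then clean up).
  WebPPictureFree(&picture);
  WebPMemoryWriterClear(&writer);
*/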
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
||||
|
||||
#endif /* WEBP_WEBP_ENCODE_H_ */
|
51
Example/Pods/YYImage/Vendor/WebP.framework/Headers/extras.h
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
// Copyright 2015 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
|
||||
#ifndef WEBP_WEBP_EXTRAS_H_
|
||||
#define WEBP_WEBP_EXTRAS_H_
|
||||
|
||||
#include "./types.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include "./encode.h"
|
||||
|
||||
#define WEBP_EXTRAS_ABI_VERSION 0x0000 // MAJOR(8b) + MINOR(8b)
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// Returns the version number of the extras library, packed in hexadecimal using
|
||||
// 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507.
|
||||
WEBP_EXTERN(int) WebPGetExtrasVersion(void);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Ad-hoc colorspace importers.
|
||||
|
||||
// Import luma sample (gray scale image) into 'picture'. The 'picture'
|
||||
// width and height must be set prior to calling this function.
|
||||
WEBP_EXTERN(int) WebPImportGray(const uint8_t* gray, WebPPicture* picture);
|
||||
|
||||
// Import rgb sample in RGB565 packed format into 'picture'. The 'picture'
|
||||
// width and height must be set prior to calling this function.
|
||||
WEBP_EXTERN(int) WebPImportRGB565(const uint8_t* rgb565, WebPPicture* pic);
|
||||
|
||||
// Import rgb sample in RGB4444 packed format into 'picture'. The 'picture'
|
||||
// width and height must be set prior to calling this function.
|
||||
WEBP_EXTERN(int) WebPImportRGB4444(const uint8_t* rgb4444, WebPPicture* pic);
|
||||
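// A usage sketch for the importers above; the picture's width and height must
// be set beforehand (e.g. after WebPPictureInit() from encode.h):
/*
  WebPPicture pic;
  WebPPictureInit(&pic);
  pic.width = width;
  pic.height = height;
  if (!WebPImportRGB565(rgb565, &pic)) {
    // ... (memory or parameter error).
  }
*/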
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
||||
|
||||
#endif /* WEBP_WEBP_EXTRAS_H_ */
|
88
Example/Pods/YYImage/Vendor/WebP.framework/Headers/format_constants.h
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
// Copyright 2012 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// Internal header for constants related to WebP file format.
|
||||
//
|
||||
// Author: Urvang (urvang@google.com)
|
||||
|
||||
#ifndef WEBP_WEBP_FORMAT_CONSTANTS_H_
|
||||
#define WEBP_WEBP_FORMAT_CONSTANTS_H_
|
||||
|
||||
// Create fourcc of the chunk from the chunk tag characters.
|
||||
#define MKFOURCC(a, b, c, d) ((a) | (b) << 8 | (c) << 16 | (uint32_t)(d) << 24)
|
||||
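// For example, MKFOURCC('V', 'P', '8', ' ') evaluates to 0x20385056, the
// little-endian tag of the "VP8 " chunk.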
|
||||
// VP8 related constants.
|
||||
#define VP8_SIGNATURE 0x9d012a // Signature in VP8 data.
|
||||
#define VP8_MAX_PARTITION0_SIZE (1 << 19) // max size of mode partition
|
||||
#define VP8_MAX_PARTITION_SIZE (1 << 24) // max size for token partition
|
||||
#define VP8_FRAME_HEADER_SIZE 10 // Size of the frame header within VP8 data.
|
||||
|
||||
// VP8L related constants.
|
||||
#define VP8L_SIGNATURE_SIZE 1 // VP8L signature size.
|
||||
#define VP8L_MAGIC_BYTE 0x2f // VP8L signature byte.
|
||||
#define VP8L_IMAGE_SIZE_BITS 14 // Number of bits used to store
|
||||
// width and height.
|
||||
#define VP8L_VERSION_BITS 3 // 3 bits reserved for version.
|
||||
#define VP8L_VERSION 0 // version 0
|
||||
#define VP8L_FRAME_HEADER_SIZE 5 // Size of the VP8L frame header.
|
||||
|
||||
#define MAX_PALETTE_SIZE 256
|
||||
#define MAX_CACHE_BITS 11
|
||||
#define HUFFMAN_CODES_PER_META_CODE 5
|
||||
#define ARGB_BLACK 0xff000000
|
||||
|
||||
#define DEFAULT_CODE_LENGTH 8
|
||||
#define MAX_ALLOWED_CODE_LENGTH 15
|
||||
|
||||
#define NUM_LITERAL_CODES 256
|
||||
#define NUM_LENGTH_CODES 24
|
||||
#define NUM_DISTANCE_CODES 40
|
||||
#define CODE_LENGTH_CODES 19
|
||||
|
||||
#define MIN_HUFFMAN_BITS 2 // min number of Huffman bits
|
||||
#define MAX_HUFFMAN_BITS 9 // max number of Huffman bits
|
||||
|
||||
#define TRANSFORM_PRESENT 1 // The bit to be written when next data
|
||||
// to be read is a transform.
|
||||
#define NUM_TRANSFORMS 4 // Maximum number of allowed transform
|
||||
// in a bitstream.
|
||||
typedef enum {
|
||||
PREDICTOR_TRANSFORM = 0,
|
||||
CROSS_COLOR_TRANSFORM = 1,
|
||||
SUBTRACT_GREEN = 2,
|
||||
COLOR_INDEXING_TRANSFORM = 3
|
||||
} VP8LImageTransformType;
|
||||
|
||||
// Alpha related constants.
|
||||
#define ALPHA_HEADER_LEN 1
|
||||
#define ALPHA_NO_COMPRESSION 0
|
||||
#define ALPHA_LOSSLESS_COMPRESSION 1
|
||||
#define ALPHA_PREPROCESSED_LEVELS 1
|
||||
|
||||
// Mux related constants.
|
||||
#define TAG_SIZE 4 // Size of a chunk tag (e.g. "VP8L").
|
||||
#define CHUNK_SIZE_BYTES 4 // Size needed to store chunk's size.
|
||||
#define CHUNK_HEADER_SIZE 8 // Size of a chunk header.
|
||||
#define RIFF_HEADER_SIZE 12 // Size of the RIFF header ("RIFFnnnnWEBP").
|
||||
#define ANMF_CHUNK_SIZE 16 // Size of an ANMF chunk.
|
||||
#define ANIM_CHUNK_SIZE 6 // Size of an ANIM chunk.
|
||||
#define FRGM_CHUNK_SIZE 6 // Size of a FRGM chunk.
|
||||
#define VP8X_CHUNK_SIZE 10 // Size of a VP8X chunk.
|
||||
|
||||
#define MAX_CANVAS_SIZE (1 << 24) // 24-bit max for VP8X width/height.
|
||||
#define MAX_IMAGE_AREA (1ULL << 32) // 32-bit max for width x height.
|
||||
#define MAX_LOOP_COUNT (1 << 16) // maximum value for loop-count
|
||||
#define MAX_DURATION (1 << 24) // maximum duration
|
||||
#define MAX_POSITION_OFFSET (1 << 24) // maximum frame/fragment x/y offset
|
||||
|
||||
// Maximum chunk payload is such that adding the header and padding won't
|
||||
// overflow a uint32_t.
|
||||
#define MAX_CHUNK_PAYLOAD (~0U - CHUNK_HEADER_SIZE - 1)
|
||||
|
||||
#endif /* WEBP_WEBP_FORMAT_CONSTANTS_H_ */
|
530
Example/Pods/YYImage/Vendor/WebP.framework/Headers/mux.h
generated
vendored
Normal file
@@ -0,0 +1,530 @@
|
||||
// Copyright 2011 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// RIFF container manipulation and encoding for WebP images.
|
||||
//
|
||||
// Authors: Urvang (urvang@google.com)
|
||||
// Vikas (vikasa@google.com)
|
||||
|
||||
#ifndef WEBP_WEBP_MUX_H_
|
||||
#define WEBP_WEBP_MUX_H_
|
||||
|
||||
#include "./mux_types.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#define WEBP_MUX_ABI_VERSION 0x0106 // MAJOR(8b) + MINOR(8b)
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Mux API
|
||||
//
|
||||
// This API allows manipulation of WebP container images containing features
|
||||
// like color profile, metadata, animation and fragmented images.
|
||||
//
|
||||
// Code Example#1: Create a WebPMux object with image data, color profile and
|
||||
// XMP metadata.
|
||||
/*
|
||||
int copy_data = 0;
|
||||
WebPMux* mux = WebPMuxNew();
|
||||
// ... (Prepare image data).
|
||||
WebPMuxSetImage(mux, &image, copy_data);
|
||||
// ... (Prepare ICCP color profile data).
|
||||
WebPMuxSetChunk(mux, "ICCP", &icc_profile, copy_data);
|
||||
// ... (Prepare XMP metadata).
|
||||
WebPMuxSetChunk(mux, "XMP ", &xmp, copy_data);
|
||||
// Get data from mux in WebP RIFF format.
|
||||
WebPMuxAssemble(mux, &output_data);
|
||||
WebPMuxDelete(mux);
|
||||
// ... (Consume output_data; e.g. write output_data.bytes to file).
|
||||
WebPDataClear(&output_data);
|
||||
*/
|
||||
|
||||
// Code Example#2: Get image and color profile data from a WebP file.
|
||||
/*
|
||||
int copy_data = 0;
|
||||
// ... (Read data from file).
|
||||
WebPMux* mux = WebPMuxCreate(&data, copy_data);
|
||||
WebPMuxGetFrame(mux, 1, &image);
|
||||
// ... (Consume image; e.g. call WebPDecode() to decode the data).
|
||||
WebPMuxGetChunk(mux, "ICCP", &icc_profile);
|
||||
// ... (Consume icc_data).
|
||||
WebPMuxDelete(mux);
|
||||
free(data);
|
||||
*/
|
||||
|
||||
// Note: forward declaring enumerations is not allowed in (strict) C and C++,
|
||||
// the types are left here for reference.
|
||||
// typedef enum WebPMuxError WebPMuxError;
|
||||
// typedef enum WebPChunkId WebPChunkId;
|
||||
typedef struct WebPMux WebPMux; // main opaque object.
|
||||
typedef struct WebPMuxFrameInfo WebPMuxFrameInfo;
|
||||
typedef struct WebPMuxAnimParams WebPMuxAnimParams;
|
||||
typedef struct WebPAnimEncoderOptions WebPAnimEncoderOptions;
|
||||
|
||||
// Error codes
|
||||
typedef enum WebPMuxError {
|
||||
WEBP_MUX_OK = 1,
|
||||
WEBP_MUX_NOT_FOUND = 0,
|
||||
WEBP_MUX_INVALID_ARGUMENT = -1,
|
||||
WEBP_MUX_BAD_DATA = -2,
|
||||
WEBP_MUX_MEMORY_ERROR = -3,
|
||||
WEBP_MUX_NOT_ENOUGH_DATA = -4
|
||||
} WebPMuxError;
|
||||
|
||||
// IDs for different types of chunks.
|
||||
typedef enum WebPChunkId {
|
||||
WEBP_CHUNK_VP8X, // VP8X
|
||||
WEBP_CHUNK_ICCP, // ICCP
|
||||
WEBP_CHUNK_ANIM, // ANIM
|
||||
WEBP_CHUNK_ANMF, // ANMF
|
||||
WEBP_CHUNK_FRGM, // FRGM
|
||||
WEBP_CHUNK_ALPHA, // ALPH
|
||||
WEBP_CHUNK_IMAGE, // VP8/VP8L
|
||||
WEBP_CHUNK_EXIF, // EXIF
|
||||
WEBP_CHUNK_XMP, // XMP
|
||||
WEBP_CHUNK_UNKNOWN, // Other chunks.
|
||||
WEBP_CHUNK_NIL
|
||||
} WebPChunkId;
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// Returns the version number of the mux library, packed in hexadecimal using
|
||||
// 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507.
|
||||
WEBP_EXTERN(int) WebPGetMuxVersion(void);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Life of a Mux object
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(WebPMux*) WebPNewInternal(int);
|
||||
|
||||
// Creates an empty mux object.
|
||||
// Returns:
|
||||
// A pointer to the newly created empty mux object.
|
||||
// Or NULL in case of memory error.
|
||||
static WEBP_INLINE WebPMux* WebPMuxNew(void) {
|
||||
return WebPNewInternal(WEBP_MUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Deletes the mux object.
|
||||
// Parameters:
|
||||
// mux - (in/out) object to be deleted
|
||||
WEBP_EXTERN(void) WebPMuxDelete(WebPMux* mux);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Mux creation.
|
||||
|
||||
// Internal, version-checked, entry point
|
||||
WEBP_EXTERN(WebPMux*) WebPMuxCreateInternal(const WebPData*, int, int);
|
||||
|
||||
// Creates a mux object from raw data given in WebP RIFF format.
|
||||
// Parameters:
|
||||
// bitstream - (in) the bitstream data in WebP RIFF format
|
||||
// copy_data - (in) value 1 indicates given data WILL be copied to the mux
|
||||
// object and value 0 indicates data will NOT be copied.
|
||||
// Returns:
|
||||
// A pointer to the mux object created from given data - on success.
|
||||
// NULL - In case of invalid data or memory error.
|
||||
static WEBP_INLINE WebPMux* WebPMuxCreate(const WebPData* bitstream,
|
||||
int copy_data) {
|
||||
return WebPMuxCreateInternal(bitstream, copy_data, WEBP_MUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Non-image chunks.
|
||||
|
||||
// Note: Only non-image related chunks should be managed through chunk APIs.
|
||||
// (Image related chunks are: "ANMF", "FRGM", "VP8 ", "VP8L" and "ALPH").
|
||||
// To add, get and delete images, use WebPMuxSetImage(), WebPMuxPushFrame(),
|
||||
// WebPMuxGetFrame() and WebPMuxDeleteFrame().
|
||||
|
||||
// Adds a chunk with id 'fourcc' and data 'chunk_data' in the mux object.
|
||||
// Any existing chunk(s) with the same id will be removed.
|
||||
// Parameters:
|
||||
// mux - (in/out) object to which the chunk is to be added
|
||||
// fourcc - (in) a character array containing the fourcc of the given chunk;
|
||||
// e.g., "ICCP", "XMP ", "EXIF" etc.
|
||||
// chunk_data - (in) the chunk data to be added
|
||||
// copy_data - (in) value 1 indicates given data WILL be copied to the mux
|
||||
// object and value 0 indicates data will NOT be copied.
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL
|
||||
// or if fourcc corresponds to an image chunk.
|
||||
// WEBP_MUX_MEMORY_ERROR - on memory allocation error.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxSetChunk(
|
||||
WebPMux* mux, const char fourcc[4], const WebPData* chunk_data,
|
||||
int copy_data);
|
||||
|
||||
// Gets a reference to the data of the chunk with id 'fourcc' in the mux object.
|
||||
// The caller should NOT free the returned data.
|
||||
// Parameters:
|
||||
// mux - (in) object from which the chunk data is to be fetched
|
||||
// fourcc - (in) a character array containing the fourcc of the chunk;
|
||||
// e.g., "ICCP", "XMP ", "EXIF" etc.
|
||||
// chunk_data - (out) returned chunk data
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL
|
||||
// or if fourcc corresponds to an image chunk.
|
||||
// WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given id.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxGetChunk(
|
||||
const WebPMux* mux, const char fourcc[4], WebPData* chunk_data);
|
||||
|
||||
// Deletes the chunk with the given 'fourcc' from the mux object.
|
||||
// Parameters:
|
||||
// mux - (in/out) object from which the chunk is to be deleted
|
||||
// fourcc - (in) a character array containing the fourcc of the chunk;
|
||||
// e.g., "ICCP", "XMP ", "EXIF" etc.
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux or fourcc is NULL
|
||||
// or if fourcc corresponds to an image chunk.
|
||||
// WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given fourcc.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxDeleteChunk(
|
||||
WebPMux* mux, const char fourcc[4]);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Images.
|
||||
|
||||
// Encapsulates data about a single frame/fragment.
|
||||
struct WebPMuxFrameInfo {
|
||||
WebPData bitstream; // image data: can be a raw VP8/VP8L bitstream
|
||||
// or a single-image WebP file.
|
||||
int x_offset; // x-offset of the frame.
|
||||
int y_offset; // y-offset of the frame.
|
||||
int duration; // duration of the frame (in milliseconds).
|
||||
|
||||
WebPChunkId id; // frame type: should be one of WEBP_CHUNK_ANMF,
|
||||
// WEBP_CHUNK_FRGM or WEBP_CHUNK_IMAGE
|
||||
WebPMuxAnimDispose dispose_method; // Disposal method for the frame.
|
||||
WebPMuxAnimBlend blend_method; // Blend operation for the frame.
|
||||
uint32_t pad[1]; // padding for later use
|
||||
};
|
||||
|
||||
// Sets the (non-animated and non-fragmented) image in the mux object.
|
||||
// Note: Any existing images (including frames/fragments) will be removed.
|
||||
// Parameters:
|
||||
// mux - (in/out) object in which the image is to be set
|
||||
// bitstream - (in) can be a raw VP8/VP8L bitstream or a single-image
|
||||
// WebP file (non-animated and non-fragmented)
|
||||
// copy_data - (in) value 1 indicates given data WILL be copied to the mux
|
||||
// object and value 0 indicates data will NOT be copied.
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux is NULL or bitstream is NULL.
|
||||
// WEBP_MUX_MEMORY_ERROR - on memory allocation error.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxSetImage(
|
||||
WebPMux* mux, const WebPData* bitstream, int copy_data);
|
||||
|
||||
// Adds a frame at the end of the mux object.
|
||||
// Notes: (1) frame.id should be one of WEBP_CHUNK_ANMF or WEBP_CHUNK_FRGM
|
||||
// (2) For setting a non-animated non-fragmented image, use
|
||||
// WebPMuxSetImage() instead.
|
||||
// (3) Type of frame being pushed must be same as the frames in mux.
|
||||
// (4) As WebP only supports even offsets, any odd offset will be snapped
|
||||
// to an even location using: offset &= ~1
|
||||
// Parameters:
|
||||
// mux - (in/out) object to which the frame is to be added
|
||||
// frame - (in) frame data.
|
||||
// copy_data - (in) value 1 indicates given data WILL be copied to the mux
|
||||
// object and value 0 indicates data will NOT be copied.
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL
|
||||
// or if content of 'frame' is invalid.
|
||||
// WEBP_MUX_MEMORY_ERROR - on memory allocation error.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxPushFrame(
|
||||
WebPMux* mux, const WebPMuxFrameInfo* frame, int copy_data);
|
||||
|
||||
// Gets the nth frame from the mux object.
|
||||
// The content of 'frame->bitstream' is allocated using malloc(), and NOT
|
||||
// owned by the 'mux' object. It MUST be deallocated by the caller by calling
|
||||
// WebPDataClear().
|
||||
// nth=0 has a special meaning - last position.
|
||||
// Parameters:
|
||||
// mux - (in) object from which the info is to be fetched
|
||||
// nth - (in) index of the frame in the mux object
|
||||
// frame - (out) data of the returned frame
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL.
|
||||
// WEBP_MUX_NOT_FOUND - if there are fewer than nth frames in the mux object.
|
||||
// WEBP_MUX_BAD_DATA - if nth frame chunk in mux is invalid.
|
||||
// WEBP_MUX_MEMORY_ERROR - on memory allocation error.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxGetFrame(
|
||||
const WebPMux* mux, uint32_t nth, WebPMuxFrameInfo* frame);
|
||||
|
||||
// Deletes a frame from the mux object.
|
||||
// nth=0 has a special meaning - last position.
|
||||
// Parameters:
|
||||
// mux - (in/out) object from which a frame is to be deleted
|
||||
// nth - (in) The position from which the frame is to be deleted
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux is NULL.
|
||||
// WEBP_MUX_NOT_FOUND - If there are fewer than nth frames in the mux object
|
||||
// before deletion.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxDeleteFrame(WebPMux* mux, uint32_t nth);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Animation.
|
||||
|
||||
// Animation parameters.
|
||||
struct WebPMuxAnimParams {
|
||||
uint32_t bgcolor; // Background color of the canvas stored (in MSB order) as:
|
||||
// Bits 00 to 07: Alpha.
|
||||
// Bits 08 to 15: Red.
|
||||
// Bits 16 to 23: Green.
|
||||
// Bits 24 to 31: Blue.
|
||||
int loop_count; // Number of times to repeat the animation [0 = infinite].
|
||||
};
|
||||
|
||||
// Sets the animation parameters in the mux object. Any existing ANIM chunks
|
||||
// will be removed.
|
||||
// Parameters:
|
||||
// mux - (in/out) object in which ANIM chunk is to be set/added
|
||||
// params - (in) animation parameters.
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL.
|
||||
// WEBP_MUX_MEMORY_ERROR - on memory allocation error.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxSetAnimationParams(
|
||||
WebPMux* mux, const WebPMuxAnimParams* params);
|
||||
|
||||
// Gets the animation parameters from the mux object.
|
||||
// Parameters:
|
||||
// mux - (in) object from which the animation parameters are to be fetched
|
||||
// params - (out) animation parameters extracted from the ANIM chunk
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL.
|
||||
// WEBP_MUX_NOT_FOUND - if ANIM chunk is not present in mux object.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxGetAnimationParams(
|
||||
const WebPMux* mux, WebPMuxAnimParams* params);
|
||||
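// A small sketch using the two calls above to make an animation loop forever
// over an opaque white background (all four channels set to 0xff):
/*
  WebPMuxAnimParams params;
  params.bgcolor = 0xffffffff;  // opaque white canvas
  params.loop_count = 0;        // 0 = repeat indefinitely
  WebPMuxSetAnimationParams(mux, &params);
*/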
|
||||
//------------------------------------------------------------------------------
|
||||
// Misc Utilities.
|
||||
|
||||
// Sets the canvas size for the mux object. The width and height can be
|
||||
// specified explicitly or left as zero (0, 0).
|
||||
// * When width and height are specified explicitly, then this frame bound is
|
||||
// enforced during subsequent calls to WebPMuxAssemble() and an error is
|
||||
// reported if any animated frame does not completely fit within the canvas.
|
||||
// * When unspecified (0, 0), the constructed canvas will get the frame bounds
|
||||
// from the bounding-box over all frames after calling WebPMuxAssemble().
|
||||
// Parameters:
|
||||
// mux - (in) object to which the canvas size is to be set
|
||||
// width - (in) canvas width
|
||||
// height - (in) canvas height
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux is NULL; or
|
||||
// width or height are invalid or out of bounds
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxSetCanvasSize(WebPMux* mux,
|
||||
int width, int height);
|
||||
|
||||
// Gets the canvas size from the mux object.
|
||||
// Note: This method assumes that the VP8X chunk, if present, is up-to-date.
|
||||
// That is, the mux object hasn't been modified since the last call to
|
||||
// WebPMuxAssemble() or WebPMuxCreate().
|
||||
// Parameters:
|
||||
// mux - (in) object from which the canvas size is to be fetched
|
||||
// width - (out) canvas width
|
||||
// height - (out) canvas height
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux, width or height is NULL.
|
||||
// WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxGetCanvasSize(const WebPMux* mux,
|
||||
int* width, int* height);
|
||||
|
||||
// Gets the feature flags from the mux object.
|
||||
// Note: This method assumes that the VP8X chunk, if present, is up-to-date.
|
||||
// That is, the mux object hasn't been modified since the last call to
|
||||
// WebPMuxAssemble() or WebPMuxCreate().
|
||||
// Parameters:
|
||||
// mux - (in) object from which the features are to be fetched
|
||||
// flags - (out) the flags specifying which features are present in the
|
||||
// mux object. This will be an OR of various flag values.
|
||||
// Enum 'WebPFeatureFlags' can be used to test individual flag values.
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux or flags is NULL.
|
||||
// WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxGetFeatures(const WebPMux* mux,
|
||||
uint32_t* flags);
|
||||
|
||||
// Gets number of chunks with the given 'id' in the mux object.
|
||||
// Parameters:
|
||||
// mux - (in) object from which the info is to be fetched
|
||||
// id - (in) chunk id specifying the type of chunk
|
||||
// num_elements - (out) number of chunks with the given chunk id
|
||||
// Returns:
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux, or num_elements is NULL.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxNumChunks(const WebPMux* mux,
|
||||
WebPChunkId id, int* num_elements);
|
||||
|
||||
// Assembles all chunks in WebP RIFF format and returns in 'assembled_data'.
|
||||
// This function also validates the mux object.
|
||||
// Note: The content of 'assembled_data' will be ignored and overwritten.
|
||||
// Also, the content of 'assembled_data' is allocated using malloc(), and NOT
|
||||
// owned by the 'mux' object. It MUST be deallocated by the caller by calling
|
||||
// WebPDataClear(). It's always safe to call WebPDataClear() upon return,
|
||||
// even in case of error.
|
||||
// Parameters:
|
||||
// mux - (in/out) object whose chunks are to be assembled
|
||||
// assembled_data - (out) assembled WebP data
|
||||
// Returns:
|
||||
// WEBP_MUX_BAD_DATA - if mux object is invalid.
|
||||
// WEBP_MUX_INVALID_ARGUMENT - if mux or assembled_data is NULL.
|
||||
// WEBP_MUX_MEMORY_ERROR - on memory allocation error.
|
||||
// WEBP_MUX_OK - on success.
|
||||
WEBP_EXTERN(WebPMuxError) WebPMuxAssemble(WebPMux* mux,
|
||||
WebPData* assembled_data);
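// Usage sketch (illustrative only; 'mux' is assumed to be a valid, fully
// populated object and error handling is abbreviated):
/*
  WebPData assembled;
  WebPDataInit(&assembled);
  if (WebPMuxAssemble(mux, &assembled) == WEBP_MUX_OK) {
    // write assembled.bytes / assembled.size to a file, or re-mux it further ...
  }
  WebPDataClear(&assembled);  // safe even when WebPMuxAssemble() failed
*/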
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// WebPAnimEncoder API
|
||||
//
|
||||
// This API allows encoding (possibly) animated WebP images.
|
||||
//
|
||||
// Code Example:
|
||||
/*
|
||||
WebPAnimEncoderOptions enc_options;
|
||||
WebPAnimEncoderOptionsInit(&enc_options);
|
||||
// Tune 'enc_options' as needed.
|
||||
WebPAnimEncoder* enc = WebPAnimEncoderNew(width, height, &enc_options);
|
||||
while(<there are more frames>) {
|
||||
WebPConfig config;
|
||||
WebPConfigInit(&config);
|
||||
// Tune 'config' as needed.
|
||||
WebPAnimEncoderAdd(enc, frame, timestamp_ms, &config);
|
||||
}
|
||||
WebPAnimEncoderAdd(enc, NULL, timestamp_ms, NULL);
|
||||
WebPAnimEncoderAssemble(enc, webp_data);
|
||||
WebPAnimEncoderDelete(enc);
|
||||
// Write the 'webp_data' to a file, or re-mux it further.
|
||||
*/
|
||||
|
||||
typedef struct WebPAnimEncoder WebPAnimEncoder; // Main opaque object.
|
||||
|
||||
// Forward declarations. Defined in encode.h.
|
||||
struct WebPPicture;
|
||||
struct WebPConfig;
|
||||
|
||||
// Global options.
|
||||
struct WebPAnimEncoderOptions {
|
||||
WebPMuxAnimParams anim_params; // Animation parameters.
|
||||
int minimize_size; // If true, minimize the output size (slow). Implicitly
|
||||
// disables key-frame insertion.
|
||||
int kmin;
|
||||
int kmax; // Minimum and maximum distance between consecutive key
|
||||
// frames in the output. The library may insert some key
|
||||
// frames as needed to satisfy this criteria.
|
||||
// Note that these conditions should hold: kmax > kmin
|
||||
// and kmin >= kmax / 2 + 1. Also, if kmin == 0, then
|
||||
// key-frame insertion is disabled; and if kmax == 0,
|
||||
// then all frames will be key-frames.
|
||||
int allow_mixed; // If true, use mixed compression mode; may choose
|
||||
// either lossy and lossless for each frame.
|
||||
int verbose; // If true, print info and warning messages to stderr.
|
||||
|
||||
uint32_t padding[4]; // Padding for later use.
|
||||
};
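// Example (illustrative): kmin = 3, kmax = 5 satisfies both constraints above,
// since kmax > kmin and kmin >= kmax / 2 + 1 (integer division: 5 / 2 + 1 = 3).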
|
||||
|
||||
// Internal, version-checked, entry point.
|
||||
WEBP_EXTERN(int) WebPAnimEncoderOptionsInitInternal(
|
||||
WebPAnimEncoderOptions*, int);
|
||||
|
||||
// Should always be called, to initialize a fresh WebPAnimEncoderOptions
|
||||
// structure before modification. Returns false in case of version mismatch.
|
||||
// WebPAnimEncoderOptionsInit() must have succeeded before using the
|
||||
// 'enc_options' object.
|
||||
static WEBP_INLINE int WebPAnimEncoderOptionsInit(
|
||||
WebPAnimEncoderOptions* enc_options) {
|
||||
return WebPAnimEncoderOptionsInitInternal(enc_options, WEBP_MUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Internal, version-checked, entry point.
|
||||
WEBP_EXTERN(WebPAnimEncoder*) WebPAnimEncoderNewInternal(
|
||||
int, int, const WebPAnimEncoderOptions*, int);
|
||||
|
||||
// Creates and initializes a WebPAnimEncoder object.
|
||||
// Parameters:
|
||||
// width/height - (in) canvas width and height of the animation.
|
||||
// enc_options - (in) encoding options; can be passed NULL to pick
|
||||
// reasonable defaults.
|
||||
// Returns:
|
||||
// A pointer to the newly created WebPAnimEncoder object.
|
||||
// Or NULL in case of memory error.
|
||||
static WEBP_INLINE WebPAnimEncoder* WebPAnimEncoderNew(
|
||||
int width, int height, const WebPAnimEncoderOptions* enc_options) {
|
||||
return WebPAnimEncoderNewInternal(width, height, enc_options,
|
||||
WEBP_MUX_ABI_VERSION);
|
||||
}
|
||||
|
||||
// Optimize the given frame for WebP, encode it and add it to the
|
||||
// WebPAnimEncoder object.
|
||||
// The last call to 'WebPAnimEncoderAdd' should be with frame = NULL, which
|
||||
// indicates that no more frames are to be added. This call is also used to
|
||||
// determine the duration of the last frame.
|
||||
// Parameters:
|
||||
// enc - (in/out) object to which the frame is to be added.
|
||||
// frame - (in/out) frame data in ARGB or YUV(A) format. If it is in YUV(A)
|
||||
// format, it will be converted to ARGB, which incurs a small loss.
|
||||
// timestamp_ms - (in) timestamp of this frame in milliseconds.
|
||||
// Duration of a frame would be calculated as
|
||||
// "timestamp of next frame - timestamp of this frame".
|
||||
// Hence, timestamps should be in non-decreasing order.
|
||||
// config - (in) encoding options; can be passed NULL to pick
|
||||
// reasonable defaults.
|
||||
// Returns:
|
||||
// On error, returns false and frame->error_code is set appropriately.
|
||||
// Otherwise, returns true.
|
||||
WEBP_EXTERN(int) WebPAnimEncoderAdd(
|
||||
WebPAnimEncoder* enc, struct WebPPicture* frame, int timestamp_ms,
|
||||
const struct WebPConfig* config);
|
||||
|
||||
// Assemble all frames added so far into a WebP bitstream.
|
||||
// This call should be preceded by a call to 'WebPAnimEncoderAdd' with
|
||||
// frame = NULL; if not, the duration of the last frame will be internally
|
||||
// estimated.
|
||||
// Parameters:
|
||||
// enc - (in/out) object from which the frames are to be assembled.
|
||||
// webp_data - (out) generated WebP bitstream.
|
||||
// Returns:
|
||||
// True on success.
|
||||
WEBP_EXTERN(int) WebPAnimEncoderAssemble(WebPAnimEncoder* enc,
|
||||
WebPData* webp_data);
|
||||
|
||||
// Get error string corresponding to the most recent call using 'enc'. The
|
||||
// returned string is owned by 'enc' and is valid only until the next call to
|
||||
// WebPAnimEncoderAdd() or WebPAnimEncoderAssemble() or WebPAnimEncoderDelete().
|
||||
// Parameters:
|
||||
// enc - (in/out) object from which the error string is to be fetched.
|
||||
// Returns:
|
||||
// NULL if 'enc' is NULL. Otherwise, returns the error string if the last call
|
||||
// to 'enc' had an error, or an empty string if the last call was a success.
|
||||
WEBP_EXTERN(const char*) WebPAnimEncoderGetError(WebPAnimEncoder* enc);
|
||||
|
||||
// Deletes the WebPAnimEncoder object.
|
||||
// Parameters:
|
||||
// enc - (in/out) object to be deleted
|
||||
WEBP_EXTERN(void) WebPAnimEncoderDelete(WebPAnimEncoder* enc);
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
||||
|
||||
#endif /* WEBP_WEBP_MUX_H_ */
|
97
Example/Pods/YYImage/Vendor/WebP.framework/Headers/mux_types.h
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
// Copyright 2012 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// Data-types common to the mux and demux libraries.
|
||||
//
|
||||
// Author: Urvang (urvang@google.com)
|
||||
|
||||
#ifndef WEBP_WEBP_MUX_TYPES_H_
|
||||
#define WEBP_WEBP_MUX_TYPES_H_
|
||||
|
||||
#include <stdlib.h> // free()
|
||||
#include <string.h> // memset()
|
||||
#include "./types.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
// Note: forward declaring enumerations is not allowed in (strict) C and C++,
|
||||
// the types are left here for reference.
|
||||
// typedef enum WebPFeatureFlags WebPFeatureFlags;
|
||||
// typedef enum WebPMuxAnimDispose WebPMuxAnimDispose;
|
||||
// typedef enum WebPMuxAnimBlend WebPMuxAnimBlend;
|
||||
typedef struct WebPData WebPData;
|
||||
|
||||
// VP8X Feature Flags.
|
||||
typedef enum WebPFeatureFlags {
|
||||
FRAGMENTS_FLAG = 0x00000001,
|
||||
ANIMATION_FLAG = 0x00000002,
|
||||
XMP_FLAG = 0x00000004,
|
||||
EXIF_FLAG = 0x00000008,
|
||||
ALPHA_FLAG = 0x00000010,
|
||||
ICCP_FLAG = 0x00000020
|
||||
} WebPFeatureFlags;
|
||||
|
||||
// Dispose method (animation only). Indicates how the area used by the current
|
||||
// frame is to be treated before rendering the next frame on the canvas.
|
||||
typedef enum WebPMuxAnimDispose {
|
||||
WEBP_MUX_DISPOSE_NONE, // Do not dispose.
|
||||
WEBP_MUX_DISPOSE_BACKGROUND // Dispose to background color.
|
||||
} WebPMuxAnimDispose;
|
||||
|
||||
// Blend operation (animation only). Indicates how transparent pixels of the
|
||||
// current frame are blended with those of the previous canvas.
|
||||
typedef enum WebPMuxAnimBlend {
|
||||
WEBP_MUX_BLEND, // Blend.
|
||||
WEBP_MUX_NO_BLEND // Do not blend.
|
||||
} WebPMuxAnimBlend;
|
||||
|
||||
// Data type used to describe 'raw' data, e.g., chunk data
|
||||
// (ICC profile, metadata) and WebP compressed image data.
|
||||
struct WebPData {
|
||||
const uint8_t* bytes;
|
||||
size_t size;
|
||||
};
|
||||
|
||||
// Initializes the contents of the 'webp_data' object with default values.
|
||||
static WEBP_INLINE void WebPDataInit(WebPData* webp_data) {
|
||||
if (webp_data != NULL) {
|
||||
memset(webp_data, 0, sizeof(*webp_data));
|
||||
}
|
||||
}
|
||||
|
||||
// Clears the contents of the 'webp_data' object by calling free(). Does not
|
||||
// deallocate the object itself.
|
||||
static WEBP_INLINE void WebPDataClear(WebPData* webp_data) {
|
||||
if (webp_data != NULL) {
|
||||
free((void*)webp_data->bytes);
|
||||
WebPDataInit(webp_data);
|
||||
}
|
||||
}
|
||||
|
||||
// Allocates necessary storage for 'dst' and copies the contents of 'src'.
|
||||
// Returns true on success.
|
||||
static WEBP_INLINE int WebPDataCopy(const WebPData* src, WebPData* dst) {
|
||||
if (src == NULL || dst == NULL) return 0;
|
||||
WebPDataInit(dst);
|
||||
if (src->bytes != NULL && src->size != 0) {
|
||||
dst->bytes = (uint8_t*)malloc(src->size);
|
||||
if (dst->bytes == NULL) return 0;
|
||||
memcpy((void*)dst->bytes, src->bytes, src->size);
|
||||
dst->size = src->size;
|
||||
}
|
||||
return 1;
|
||||
}
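// Usage sketch (illustrative only; 'payload' and 'payload_size' stand for
// caller-owned bytes and are not defined in this header):
/*
  WebPData src, dst;
  WebPDataInit(&src);
  src.bytes = payload;              // borrowed reference, not owned by 'src'
  src.size  = payload_size;
  if (WebPDataCopy(&src, &dst)) {   // 'dst' now owns a malloc()'ed copy
    // ... use dst.bytes / dst.size ...
    WebPDataClear(&dst);            // frees the copy and re-initializes 'dst'
  }
*/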
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
||||
|
||||
#endif /* WEBP_WEBP_MUX_TYPES_H_ */
|
52
Example/Pods/YYImage/Vendor/WebP.framework/Headers/types.h
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
// Copyright 2010 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style license
|
||||
// that can be found in the COPYING file in the root of the source
|
||||
// tree. An additional intellectual property rights grant can be found
|
||||
// in the file PATENTS. All contributing project authors may
|
||||
// be found in the AUTHORS file in the root of the source tree.
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// Common types
|
||||
//
|
||||
// Author: Skal (pascal.massimino@gmail.com)
|
||||
|
||||
#ifndef WEBP_WEBP_TYPES_H_
|
||||
#define WEBP_WEBP_TYPES_H_
|
||||
|
||||
#include <stddef.h> // for size_t
|
||||
|
||||
#ifndef _MSC_VER
|
||||
#include <inttypes.h>
|
||||
#if defined(__cplusplus) || !defined(__STRICT_ANSI__) || \
|
||||
(defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L)
|
||||
#define WEBP_INLINE inline
|
||||
#else
|
||||
#define WEBP_INLINE
|
||||
#endif
|
||||
#else
|
||||
typedef signed char int8_t;
|
||||
typedef unsigned char uint8_t;
|
||||
typedef signed short int16_t;
|
||||
typedef unsigned short uint16_t;
|
||||
typedef signed int int32_t;
|
||||
typedef unsigned int uint32_t;
|
||||
typedef unsigned long long int uint64_t;
|
||||
typedef long long int int64_t;
|
||||
#define WEBP_INLINE __forceinline
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
#ifndef WEBP_EXTERN
|
||||
// This explicitly marks library functions and allows for changing the
|
||||
// signature for e.g., Windows DLL builds.
|
||||
# if defined(__GNUC__) && __GNUC__ >= 4
|
||||
# define WEBP_EXTERN(type) extern __attribute__ ((visibility ("default"))) type
|
||||
# else
|
||||
# define WEBP_EXTERN(type) extern type
|
||||
# endif /* __GNUC__ >= 4 */
|
||||
#endif /* WEBP_EXTERN */
|
||||
|
||||
// Macro to check ABI compatibility (same major revision number)
|
||||
#define WEBP_ABI_IS_INCOMPATIBLE(a, b) (((a) >> 8) != ((b) >> 8))
|
||||
|
||||
#endif /* WEBP_WEBP_TYPES_H_ */
|
BIN
Example/Pods/YYImage/Vendor/WebP.framework/WebP
generated
vendored
Normal file
Binary file not shown.
125
Example/Pods/YYImage/YYImage/YYAnimatedImageView.h
generated
Normal file
@@ -0,0 +1,125 @@
|
||||
//
|
||||
// YYAnimatedImageView.h
|
||||
// YYImage <https://github.com/ibireme/YYImage>
|
||||
//
|
||||
// Created by ibireme on 14/10/19.
|
||||
// Copyright (c) 2015 ibireme.
|
||||
//
|
||||
// This source code is licensed under the MIT-style license found in the
|
||||
// LICENSE file in the root directory of this source tree.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/**
|
||||
An image view for displaying animated images.
|
||||
|
||||
@discussion It is a fully compatible `UIImageView` subclass.
|
||||
If the `image` or `highlightedImage` property conforms to the `YYAnimatedImage` protocol,
|
||||
then it can be used to play the multi-frame animation. The animation can also be
|
||||
controlled with the UIImageView methods `-startAnimating`, `-stopAnimating` and `-isAnimating`.
|
||||
|
||||
This view requests the frame data just in time. When the device has enough free memory,
|
||||
this view may cache some or all future frames in an inner buffer for lower CPU cost.
|
||||
Buffer size is dynamically adjusted based on the current state of the device memory.
|
||||
|
||||
Sample Code:
|
||||
|
||||
// ani@3x.gif
|
||||
YYImage *image = [YYImage imageNamed:@"ani"];
|
||||
YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
|
||||
[view addSubview:imageView];
|
||||
*/
|
||||
@interface YYAnimatedImageView : UIImageView
|
||||
|
||||
/**
|
||||
If the image has more than one frame, setting this value to `YES` will automatically
|
||||
play/stop the animation when the view becomes visible/invisible.
|
||||
|
||||
The default value is `YES`.
|
||||
*/
|
||||
@property (nonatomic) BOOL autoPlayAnimatedImage;
|
||||
|
||||
/**
|
||||
Index of the currently displayed frame (index from 0).
|
||||
|
||||
Setting a new value on this property will cause the new frame to be displayed immediately.
|
||||
If the new value is invalid, this method has no effect.
|
||||
|
||||
You can add an observer to this property to observe the playing status.
|
||||
*/
|
||||
@property (nonatomic) NSUInteger currentAnimatedImageIndex;
|
||||
|
||||
/**
|
||||
Whether the image view is playing animation currently.
|
||||
|
||||
You can add an observer to this property to observe the playing status.
|
||||
*/
|
||||
@property (nonatomic, readonly) BOOL currentIsPlayingAnimation;
|
||||
|
||||
/**
|
||||
The animation timer's runloop mode, default is `NSRunLoopCommonModes`.
|
||||
|
||||
Setting this property to `NSDefaultRunLoopMode` will make the animation pause during
|
||||
UIScrollView scrolling.
|
||||
*/
|
||||
@property (nonatomic, copy) NSString *runloopMode;
|
||||
|
||||
/**
|
||||
The max size (in bytes) of the inner frame buffer; the default is 0 (dynamic).
|
||||
|
||||
When the device has enough free memory, this view will request and decode some or
|
||||
all future frame images into an inner buffer. If this property's value is 0, then
|
||||
the max buffer size will be dynamically adjusted based on the current state of
|
||||
the device free memory. Otherwise, the buffer size will be limited by this value.
|
||||
|
||||
When a memory warning is received or the app enters the background, the buffer will be released
|
||||
immediately, and may grow back at the right time.
|
||||
*/
|
||||
@property (nonatomic) NSUInteger maxBufferSize;
|
||||
|
||||
@end
|
||||
|
||||
|
||||
|
||||
/**
|
||||
The YYAnimatedImage protocol declares the required methods for animated image
|
||||
display with YYAnimatedImageView.
|
||||
|
||||
Subclass a UIImage and implement this protocol, so that instances of that class
|
||||
can be set to YYAnimatedImageView.image or YYAnimatedImageView.highlightedImage
|
||||
to display animation.
|
||||
|
||||
See `YYImage` and `YYFrameImage` for example.
|
||||
*/
|
||||
@protocol YYAnimatedImage <NSObject>
|
||||
@required
|
||||
/// Total animated frame count.
|
||||
/// If the frame count is less than 1, the methods below will be ignored.
|
||||
- (NSUInteger)animatedImageFrameCount;
|
||||
|
||||
/// Animation loop count, 0 means infinite looping.
|
||||
- (NSUInteger)animatedImageLoopCount;
|
||||
|
||||
/// Bytes per frame (in memory). It may be used to optimize the memory buffer size.
|
||||
- (NSUInteger)animatedImageBytesPerFrame;
|
||||
|
||||
/// Returns the frame image from a specified index.
|
||||
/// This method may be called on background thread.
|
||||
/// @param index Frame index (zero based).
|
||||
- (nullable UIImage *)animatedImageFrameAtIndex:(NSUInteger)index;
|
||||
|
||||
/// Returns the frame's duration at a specified index.
|
||||
/// @param index Frame index (zero based).
|
||||
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index;
|
||||
|
||||
@optional
|
||||
/// A rectangle in image coordinates defining the subrectangle of the image that
|
||||
/// will be displayed. The rectangle should not be outside the image's bounds.
|
||||
/// It may be used to display sprite animation with a single image (sprite sheet).
|
||||
- (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index;
|
||||
@end
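/*
 Minimal conformance sketch (illustrative only; the class `MyFrameImage` and its
 ivars are hypothetical and not part of YYImage):

     @interface MyFrameImage : UIImage <YYAnimatedImage>
     @end

     @implementation MyFrameImage {
         NSArray<UIImage *> *_frames;      // pre-decoded frames
         NSArray<NSNumber *> *_durations;  // per-frame durations in seconds
     }
     - (NSUInteger)animatedImageFrameCount { return _frames.count; }
     - (NSUInteger)animatedImageLoopCount { return 0; }       // 0 = loop forever
     - (NSUInteger)animatedImageBytesPerFrame { return 0; }   // 0 = unknown
     - (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
         return index < _frames.count ? _frames[index] : nil;
     }
     - (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
         return index < _durations.count ? _durations[index].doubleValue : 0;
     }
     @end
 */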
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
672
Example/Pods/YYImage/YYImage/YYAnimatedImageView.m
generated
Normal file
@@ -0,0 +1,672 @@
|
||||
//
|
||||
// YYAnimatedImageView.m
|
||||
// YYImage <https://github.com/ibireme/YYImage>
|
||||
//
|
||||
// Created by ibireme on 14/10/19.
|
||||
// Copyright (c) 2015 ibireme.
|
||||
//
|
||||
// This source code is licensed under the MIT-style license found in the
|
||||
// LICENSE file in the root directory of this source tree.
|
||||
//
|
||||
|
||||
#import "YYAnimatedImageView.h"
|
||||
#import "YYImageCoder.h"
|
||||
#import <pthread.h>
|
||||
#import <mach/mach.h>
|
||||
|
||||
|
||||
#define BUFFER_SIZE (10 * 1024 * 1024) // 10MB (minimum memory buffer size)
|
||||
|
||||
#define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
|
||||
__VA_ARGS__; \
|
||||
dispatch_semaphore_signal(self->_lock);
|
||||
|
||||
#define LOCK_VIEW(...) dispatch_semaphore_wait(view->_lock, DISPATCH_TIME_FOREVER); \
|
||||
__VA_ARGS__; \
|
||||
dispatch_semaphore_signal(view->_lock);
|
||||
|
||||
|
||||
static int64_t _YYDeviceMemoryTotal() {
|
||||
int64_t mem = [[NSProcessInfo processInfo] physicalMemory];
|
||||
if (mem < -1) mem = -1;
|
||||
return mem;
|
||||
}
|
||||
|
||||
static int64_t _YYDeviceMemoryFree() {
|
||||
mach_port_t host_port = mach_host_self();
|
||||
mach_msg_type_number_t host_size = sizeof(vm_statistics_data_t) / sizeof(integer_t);
|
||||
vm_size_t page_size;
|
||||
vm_statistics_data_t vm_stat;
|
||||
kern_return_t kern;
|
||||
|
||||
kern = host_page_size(host_port, &page_size);
|
||||
if (kern != KERN_SUCCESS) return -1;
|
||||
kern = host_statistics(host_port, HOST_VM_INFO, (host_info_t)&vm_stat, &host_size);
|
||||
if (kern != KERN_SUCCESS) return -1;
|
||||
return vm_stat.free_count * page_size;
|
||||
}
|
||||
|
||||
/**
|
||||
A proxy used to hold a weak object.
|
||||
It can be used to avoid retain cycles, such as the target in NSTimer or CADisplayLink.
|
||||
*/
|
||||
@interface _YYImageWeakProxy : NSProxy
|
||||
@property (nonatomic, weak, readonly) id target;
|
||||
- (instancetype)initWithTarget:(id)target;
|
||||
+ (instancetype)proxyWithTarget:(id)target;
|
||||
@end
|
||||
|
||||
@implementation _YYImageWeakProxy
|
||||
- (instancetype)initWithTarget:(id)target {
|
||||
_target = target;
|
||||
return self;
|
||||
}
|
||||
+ (instancetype)proxyWithTarget:(id)target {
|
||||
return [[_YYImageWeakProxy alloc] initWithTarget:target];
|
||||
}
|
||||
- (id)forwardingTargetForSelector:(SEL)selector {
|
||||
return _target;
|
||||
}
|
||||
- (void)forwardInvocation:(NSInvocation *)invocation {
|
||||
void *null = NULL;
|
||||
[invocation setReturnValue:&null];
|
||||
}
|
||||
- (NSMethodSignature *)methodSignatureForSelector:(SEL)selector {
|
||||
return [NSObject instanceMethodSignatureForSelector:@selector(init)];
|
||||
}
|
||||
- (BOOL)respondsToSelector:(SEL)aSelector {
|
||||
return [_target respondsToSelector:aSelector];
|
||||
}
|
||||
- (BOOL)isEqual:(id)object {
|
||||
return [_target isEqual:object];
|
||||
}
|
||||
- (NSUInteger)hash {
|
||||
return [_target hash];
|
||||
}
|
||||
- (Class)superclass {
|
||||
return [_target superclass];
|
||||
}
|
||||
- (Class)class {
|
||||
return [_target class];
|
||||
}
|
||||
- (BOOL)isKindOfClass:(Class)aClass {
|
||||
return [_target isKindOfClass:aClass];
|
||||
}
|
||||
- (BOOL)isMemberOfClass:(Class)aClass {
|
||||
return [_target isMemberOfClass:aClass];
|
||||
}
|
||||
- (BOOL)conformsToProtocol:(Protocol *)aProtocol {
|
||||
return [_target conformsToProtocol:aProtocol];
|
||||
}
|
||||
- (BOOL)isProxy {
|
||||
return YES;
|
||||
}
|
||||
- (NSString *)description {
|
||||
return [_target description];
|
||||
}
|
||||
- (NSString *)debugDescription {
|
||||
return [_target debugDescription];
|
||||
}
|
||||
@end
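/*
 Usage sketch (illustrative only; `view` stands for any object implementing
 `step:` that must not be retained by the display link):

     CADisplayLink *link =
         [CADisplayLink displayLinkWithTarget:[_YYImageWeakProxy proxyWithTarget:view]
                                     selector:@selector(step:)];
     [link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
     // The proxy holds `view` weakly, so the display link no longer keeps the
     // view alive; after the view deallocates, -forwardInvocation: swallows the
     // tick instead of crashing on a dangling target.
 */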
|
||||
|
||||
|
||||
|
||||
|
||||
typedef NS_ENUM(NSUInteger, YYAnimatedImageType) {
|
||||
YYAnimatedImageTypeNone = 0,
|
||||
YYAnimatedImageTypeImage,
|
||||
YYAnimatedImageTypeHighlightedImage,
|
||||
YYAnimatedImageTypeImages,
|
||||
YYAnimatedImageTypeHighlightedImages,
|
||||
};
|
||||
|
||||
@interface YYAnimatedImageView() {
|
||||
@package
|
||||
UIImage <YYAnimatedImage> *_curAnimatedImage;
|
||||
|
||||
dispatch_once_t _onceToken;
|
||||
dispatch_semaphore_t _lock; ///< lock for _buffer
|
||||
NSOperationQueue *_requestQueue; ///< image request queue, serial
|
||||
|
||||
CADisplayLink *_link; ///< ticker for change frame
|
||||
NSTimeInterval _time; ///< time after last frame
|
||||
|
||||
UIImage *_curFrame; ///< current frame to display
|
||||
NSUInteger _curIndex; ///< current frame index (from 0)
|
||||
NSUInteger _totalFrameCount; ///< total frame count
|
||||
|
||||
BOOL _loopEnd; ///< whether the looping has ended.
|
||||
NSUInteger _curLoop; ///< current loop count (from 0)
|
||||
NSUInteger _totalLoop; ///< total loop count, 0 means infinity
|
||||
|
||||
NSMutableDictionary *_buffer; ///< frame buffer
|
||||
BOOL _bufferMiss; ///< whether a frame was missed on the last opportunity
|
||||
NSUInteger _maxBufferCount; ///< maximum buffer count
|
||||
NSInteger _incrBufferCount; ///< current allowed buffer count (will increase by step)
|
||||
|
||||
CGRect _curContentsRect;
|
||||
BOOL _curImageHasContentsRect; ///< image has implemented "animatedImageContentsRectAtIndex:"
|
||||
}
|
||||
@property (nonatomic, readwrite) BOOL currentIsPlayingAnimation;
|
||||
- (void)calcMaxBufferCount;
|
||||
@end
|
||||
|
||||
/// An operation for image fetch
|
||||
@interface _YYAnimatedImageViewFetchOperation : NSOperation
|
||||
@property (nonatomic, weak) YYAnimatedImageView *view;
|
||||
@property (nonatomic, assign) NSUInteger nextIndex;
|
||||
@property (nonatomic, strong) UIImage <YYAnimatedImage> *curImage;
|
||||
@end
|
||||
|
||||
@implementation _YYAnimatedImageViewFetchOperation
|
||||
- (void)main {
|
||||
__strong YYAnimatedImageView *view = _view;
|
||||
if (!view) return;
|
||||
if ([self isCancelled]) return;
|
||||
view->_incrBufferCount++;
|
||||
if (view->_incrBufferCount == 0) [view calcMaxBufferCount];
|
||||
if (view->_incrBufferCount > (NSInteger)view->_maxBufferCount) {
|
||||
view->_incrBufferCount = view->_maxBufferCount;
|
||||
}
|
||||
NSUInteger idx = _nextIndex;
|
||||
NSUInteger max = view->_incrBufferCount < 1 ? 1 : view->_incrBufferCount;
|
||||
NSUInteger total = view->_totalFrameCount;
|
||||
view = nil;
|
||||
|
||||
for (int i = 0; i < max; i++, idx++) {
|
||||
@autoreleasepool {
|
||||
if (idx >= total) idx = 0;
|
||||
if ([self isCancelled]) break;
|
||||
__strong YYAnimatedImageView *view = _view;
|
||||
if (!view) break;
|
||||
LOCK_VIEW(BOOL miss = (view->_buffer[@(idx)] == nil));
|
||||
|
||||
if (miss) {
|
||||
UIImage *img = [_curImage animatedImageFrameAtIndex:idx];
|
||||
img = img.yy_imageByDecoded;
|
||||
if ([self isCancelled]) break;
|
||||
LOCK_VIEW(view->_buffer[@(idx)] = img ? img : [NSNull null]);
|
||||
view = nil;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@end
|
||||
|
||||
@implementation YYAnimatedImageView
|
||||
|
||||
- (instancetype)init {
|
||||
self = [super init];
|
||||
_runloopMode = NSRunLoopCommonModes;
|
||||
_autoPlayAnimatedImage = YES;
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame {
|
||||
self = [super initWithFrame:frame];
|
||||
_runloopMode = NSRunLoopCommonModes;
|
||||
_autoPlayAnimatedImage = YES;
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithImage:(UIImage *)image {
|
||||
self = [super init];
|
||||
_runloopMode = NSRunLoopCommonModes;
|
||||
_autoPlayAnimatedImage = YES;
|
||||
self.frame = (CGRect) {CGPointZero, image.size };
|
||||
self.image = image;
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithImage:(UIImage *)image highlightedImage:(UIImage *)highlightedImage {
|
||||
self = [super init];
|
||||
_runloopMode = NSRunLoopCommonModes;
|
||||
_autoPlayAnimatedImage = YES;
|
||||
CGSize size = image ? image.size : highlightedImage.size;
|
||||
self.frame = (CGRect) {CGPointZero, size };
|
||||
self.image = image;
|
||||
self.highlightedImage = highlightedImage;
|
||||
return self;
|
||||
}
|
||||
|
||||
// init the animated params.
|
||||
- (void)resetAnimated {
|
||||
dispatch_once(&_onceToken, ^{
|
||||
_lock = dispatch_semaphore_create(1);
|
||||
_buffer = [NSMutableDictionary new];
|
||||
_requestQueue = [[NSOperationQueue alloc] init];
|
||||
_requestQueue.maxConcurrentOperationCount = 1;
|
||||
_link = [CADisplayLink displayLinkWithTarget:[_YYImageWeakProxy proxyWithTarget:self] selector:@selector(step:)];
|
||||
if (_runloopMode) {
|
||||
[_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
|
||||
}
|
||||
_link.paused = YES;
|
||||
|
||||
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
|
||||
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
|
||||
});
|
||||
|
||||
[_requestQueue cancelAllOperations];
|
||||
LOCK(
|
||||
if (_buffer.count) {
|
||||
NSMutableDictionary *holder = _buffer;
|
||||
_buffer = [NSMutableDictionary new];
|
||||
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
|
||||
// Capture the dictionary to global queue,
|
||||
// release these images in background to avoid blocking UI thread.
|
||||
[holder class];
|
||||
});
|
||||
}
|
||||
);
|
||||
_link.paused = YES;
|
||||
_time = 0;
|
||||
if (_curIndex != 0) {
|
||||
[self willChangeValueForKey:@"currentAnimatedImageIndex"];
|
||||
_curIndex = 0;
|
||||
[self didChangeValueForKey:@"currentAnimatedImageIndex"];
|
||||
}
|
||||
_curAnimatedImage = nil;
|
||||
_curFrame = nil;
|
||||
_curLoop = 0;
|
||||
_totalLoop = 0;
|
||||
_totalFrameCount = 1;
|
||||
_loopEnd = NO;
|
||||
_bufferMiss = NO;
|
||||
_incrBufferCount = 0;
|
||||
}
|
||||
|
||||
- (void)setImage:(UIImage *)image {
|
||||
if (self.image == image) return;
|
||||
[self setImage:image withType:YYAnimatedImageTypeImage];
|
||||
}
|
||||
|
||||
- (void)setHighlightedImage:(UIImage *)highlightedImage {
|
||||
if (self.highlightedImage == highlightedImage) return;
|
||||
[self setImage:highlightedImage withType:YYAnimatedImageTypeHighlightedImage];
|
||||
}
|
||||
|
||||
- (void)setAnimationImages:(NSArray *)animationImages {
|
||||
if (self.animationImages == animationImages) return;
|
||||
[self setImage:animationImages withType:YYAnimatedImageTypeImages];
|
||||
}
|
||||
|
||||
- (void)setHighlightedAnimationImages:(NSArray *)highlightedAnimationImages {
|
||||
if (self.highlightedAnimationImages == highlightedAnimationImages) return;
|
||||
[self setImage:highlightedAnimationImages withType:YYAnimatedImageTypeHighlightedImages];
|
||||
}
|
||||
|
||||
- (void)setHighlighted:(BOOL)highlighted {
|
||||
[super setHighlighted:highlighted];
|
||||
if (_link) [self resetAnimated];
|
||||
[self imageChanged];
|
||||
}
|
||||
|
||||
- (id)imageForType:(YYAnimatedImageType)type {
|
||||
switch (type) {
|
||||
case YYAnimatedImageTypeNone: return nil;
|
||||
case YYAnimatedImageTypeImage: return self.image;
|
||||
case YYAnimatedImageTypeHighlightedImage: return self.highlightedImage;
|
||||
case YYAnimatedImageTypeImages: return self.animationImages;
|
||||
case YYAnimatedImageTypeHighlightedImages: return self.highlightedAnimationImages;
|
||||
}
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (YYAnimatedImageType)currentImageType {
|
||||
YYAnimatedImageType curType = YYAnimatedImageTypeNone;
|
||||
if (self.highlighted) {
|
||||
if (self.highlightedAnimationImages.count) curType = YYAnimatedImageTypeHighlightedImages;
|
||||
else if (self.highlightedImage) curType = YYAnimatedImageTypeHighlightedImage;
|
||||
}
|
||||
if (curType == YYAnimatedImageTypeNone) {
|
||||
if (self.animationImages.count) curType = YYAnimatedImageTypeImages;
|
||||
else if (self.image) curType = YYAnimatedImageTypeImage;
|
||||
}
|
||||
return curType;
|
||||
}
|
||||
|
||||
- (void)setImage:(id)image withType:(YYAnimatedImageType)type {
|
||||
[self stopAnimating];
|
||||
if (_link) [self resetAnimated];
|
||||
_curFrame = nil;
|
||||
switch (type) {
|
||||
case YYAnimatedImageTypeNone: break;
|
||||
case YYAnimatedImageTypeImage: super.image = image; break;
|
||||
case YYAnimatedImageTypeHighlightedImage: super.highlightedImage = image; break;
|
||||
case YYAnimatedImageTypeImages: super.animationImages = image; break;
|
||||
case YYAnimatedImageTypeHighlightedImages: super.highlightedAnimationImages = image; break;
|
||||
}
|
||||
[self imageChanged];
|
||||
}
|
||||
|
||||
- (void)imageChanged {
|
||||
YYAnimatedImageType newType = [self currentImageType];
|
||||
id newVisibleImage = [self imageForType:newType];
|
||||
NSUInteger newImageFrameCount = 0;
|
||||
BOOL hasContentsRect = NO;
|
||||
if ([newVisibleImage isKindOfClass:[UIImage class]] &&
|
||||
[newVisibleImage conformsToProtocol:@protocol(YYAnimatedImage)]) {
|
||||
newImageFrameCount = ((UIImage<YYAnimatedImage> *) newVisibleImage).animatedImageFrameCount;
|
||||
if (newImageFrameCount > 1) {
|
||||
hasContentsRect = [((UIImage<YYAnimatedImage> *) newVisibleImage) respondsToSelector:@selector(animatedImageContentsRectAtIndex:)];
|
||||
}
|
||||
}
|
||||
if (!hasContentsRect && _curImageHasContentsRect) {
|
||||
if (!CGRectEqualToRect(self.layer.contentsRect, CGRectMake(0, 0, 1, 1)) ) {
|
||||
[CATransaction begin];
|
||||
[CATransaction setDisableActions:YES];
|
||||
self.layer.contentsRect = CGRectMake(0, 0, 1, 1);
|
||||
[CATransaction commit];
|
||||
}
|
||||
}
|
||||
_curImageHasContentsRect = hasContentsRect;
|
||||
if (hasContentsRect) {
|
||||
CGRect rect = [((UIImage<YYAnimatedImage> *) newVisibleImage) animatedImageContentsRectAtIndex:0];
|
||||
[self setContentsRect:rect forImage:newVisibleImage];
|
||||
}
|
||||
|
||||
if (newImageFrameCount > 1) {
|
||||
[self resetAnimated];
|
||||
_curAnimatedImage = newVisibleImage;
|
||||
_curFrame = newVisibleImage;
|
||||
_totalLoop = _curAnimatedImage.animatedImageLoopCount;
|
||||
_totalFrameCount = _curAnimatedImage.animatedImageFrameCount;
|
||||
[self calcMaxBufferCount];
|
||||
}
|
||||
[self setNeedsDisplay];
|
||||
[self didMoved];
|
||||
}
|
||||
|
||||
// dynamically adjust buffer size for current memory.
|
||||
- (void)calcMaxBufferCount {
|
||||
int64_t bytes = (int64_t)_curAnimatedImage.animatedImageBytesPerFrame;
|
||||
if (bytes == 0) bytes = 1024;
|
||||
|
||||
int64_t total = _YYDeviceMemoryTotal();
|
||||
int64_t free = _YYDeviceMemoryFree();
|
||||
int64_t max = MIN(total * 0.2, free * 0.6);
|
||||
max = MAX(max, BUFFER_SIZE);
|
||||
if (_maxBufferSize) max = max > _maxBufferSize ? _maxBufferSize : max;
|
||||
double maxBufferCount = (double)max / (double)bytes;
|
||||
if (maxBufferCount < 1) maxBufferCount = 1;
|
||||
else if (maxBufferCount > 512) maxBufferCount = 512;
|
||||
_maxBufferCount = maxBufferCount;
|
||||
}
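// Worked example (hypothetical numbers): with 2048 MB physical memory, 500 MB free
// and 4 MB per frame, max = MIN(2048 MB * 0.2, 500 MB * 0.6) = 300 MB, which stays
// above BUFFER_SIZE, so roughly 75 frames may be kept in the buffer.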
|
||||
|
||||
- (void)dealloc {
|
||||
[_requestQueue cancelAllOperations];
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidEnterBackgroundNotification object:nil];
|
||||
[_link invalidate];
|
||||
}
|
||||
|
||||
- (BOOL)isAnimating {
|
||||
return self.currentIsPlayingAnimation;
|
||||
}
|
||||
|
||||
- (void)stopAnimating {
|
||||
[super stopAnimating];
|
||||
[_requestQueue cancelAllOperations];
|
||||
_link.paused = YES;
|
||||
self.currentIsPlayingAnimation = NO;
|
||||
}
|
||||
|
||||
- (void)startAnimating {
|
||||
YYAnimatedImageType type = [self currentImageType];
|
||||
if (type == YYAnimatedImageTypeImages || type == YYAnimatedImageTypeHighlightedImages) {
|
||||
NSArray *images = [self imageForType:type];
|
||||
if (images.count > 0) {
|
||||
[super startAnimating];
|
||||
self.currentIsPlayingAnimation = YES;
|
||||
}
|
||||
} else {
|
||||
if (_curAnimatedImage && _link.paused) {
|
||||
_curLoop = 0;
|
||||
_loopEnd = NO;
|
||||
_link.paused = NO;
|
||||
self.currentIsPlayingAnimation = YES;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
- (void)didReceiveMemoryWarning:(NSNotification *)notification {
|
||||
[_requestQueue cancelAllOperations];
|
||||
[_requestQueue addOperationWithBlock: ^{
|
||||
_incrBufferCount = -60 - (int)(arc4random() % 120); // about 1~3 seconds to grow back..
|
||||
NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
|
||||
LOCK(
|
||||
NSArray * keys = _buffer.allKeys;
|
||||
for (NSNumber * key in keys) {
|
||||
if (![key isEqualToNumber:next]) { // keep the next frame for smooth animation
|
||||
[_buffer removeObjectForKey:key];
|
||||
}
|
||||
}
|
||||
)//LOCK
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)didEnterBackground:(NSNotification *)notification {
|
||||
[_requestQueue cancelAllOperations];
|
||||
NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
|
||||
LOCK(
|
||||
NSArray * keys = _buffer.allKeys;
|
||||
for (NSNumber * key in keys) {
|
||||
if (![key isEqualToNumber:next]) { // keep the next frame for smooth animation
|
||||
[_buffer removeObjectForKey:key];
|
||||
}
|
||||
}
|
||||
)//LOCK
|
||||
}
|
||||
|
||||
- (void)step:(CADisplayLink *)link {
|
||||
UIImage <YYAnimatedImage> *image = _curAnimatedImage;
|
||||
NSMutableDictionary *buffer = _buffer;
|
||||
UIImage *bufferedImage = nil;
|
||||
NSUInteger nextIndex = (_curIndex + 1) % _totalFrameCount;
|
||||
BOOL bufferIsFull = NO;
|
||||
|
||||
if (!image) return;
|
||||
if (_loopEnd) { // view will keep in last frame
|
||||
[self stopAnimating];
|
||||
return;
|
||||
}
|
||||
|
||||
NSTimeInterval delay = 0;
|
||||
if (!_bufferMiss) {
|
||||
_time += link.duration;
|
||||
delay = [image animatedImageDurationAtIndex:_curIndex];
|
||||
if (_time < delay) return;
|
||||
_time -= delay;
|
||||
if (nextIndex == 0) {
|
||||
_curLoop++;
|
||||
if (_curLoop >= _totalLoop && _totalLoop != 0) {
|
||||
_loopEnd = YES;
|
||||
[self stopAnimating];
|
||||
[self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
|
||||
return; // stop at last frame
|
||||
}
|
||||
}
|
||||
delay = [image animatedImageDurationAtIndex:nextIndex];
|
||||
if (_time > delay) _time = delay; // do not jump over frame
|
||||
}
|
||||
LOCK(
|
||||
bufferedImage = buffer[@(nextIndex)];
|
||||
if (bufferedImage) {
|
||||
if ((int)_incrBufferCount < _totalFrameCount) {
|
||||
[buffer removeObjectForKey:@(nextIndex)];
|
||||
}
|
||||
[self willChangeValueForKey:@"currentAnimatedImageIndex"];
|
||||
_curIndex = nextIndex;
|
||||
[self didChangeValueForKey:@"currentAnimatedImageIndex"];
|
||||
_curFrame = bufferedImage == (id)[NSNull null] ? nil : bufferedImage;
|
||||
if (_curImageHasContentsRect) {
|
||||
_curContentsRect = [image animatedImageContentsRectAtIndex:_curIndex];
|
||||
[self setContentsRect:_curContentsRect forImage:_curFrame];
|
||||
}
|
||||
nextIndex = (_curIndex + 1) % _totalFrameCount;
|
||||
_bufferMiss = NO;
|
||||
if (buffer.count == _totalFrameCount) {
|
||||
bufferIsFull = YES;
|
||||
}
|
||||
} else {
|
||||
_bufferMiss = YES;
|
||||
}
|
||||
)//LOCK
|
||||
|
||||
if (!_bufferMiss) {
|
||||
[self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
|
||||
}
|
||||
|
||||
if (!bufferIsFull && _requestQueue.operationCount == 0) { // if some work not finished, wait for next opportunity
|
||||
_YYAnimatedImageViewFetchOperation *operation = [_YYAnimatedImageViewFetchOperation new];
|
||||
operation.view = self;
|
||||
operation.nextIndex = nextIndex;
|
||||
operation.curImage = image;
|
||||
[_requestQueue addOperation:operation];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)displayLayer:(CALayer *)layer {
|
||||
if (_curFrame) {
|
||||
layer.contents = (__bridge id)_curFrame.CGImage;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)setContentsRect:(CGRect)rect forImage:(UIImage *)image{
|
||||
CGRect layerRect = CGRectMake(0, 0, 1, 1);
|
||||
if (image) {
|
||||
CGSize imageSize = image.size;
|
||||
if (imageSize.width > 0.01 && imageSize.height > 0.01) {
|
||||
layerRect.origin.x = rect.origin.x / imageSize.width;
|
||||
layerRect.origin.y = rect.origin.y / imageSize.height;
|
||||
layerRect.size.width = rect.size.width / imageSize.width;
|
||||
layerRect.size.height = rect.size.height / imageSize.height;
|
||||
layerRect = CGRectIntersection(layerRect, CGRectMake(0, 0, 1, 1));
|
||||
if (CGRectIsNull(layerRect) || CGRectIsEmpty(layerRect)) {
|
||||
layerRect = CGRectMake(0, 0, 1, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
[CATransaction begin];
|
||||
[CATransaction setDisableActions:YES];
|
||||
self.layer.contentsRect = layerRect;
|
||||
[CATransaction commit];
|
||||
}
|
||||
|
||||
- (void)didMoved {
|
||||
if (self.autoPlayAnimatedImage) {
|
||||
if(self.superview && self.window) {
|
||||
[self startAnimating];
|
||||
} else {
|
||||
[self stopAnimating];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
- (void)didMoveToWindow {
|
||||
[super didMoveToWindow];
|
||||
[self didMoved];
|
||||
}
|
||||
|
||||
- (void)didMoveToSuperview {
|
||||
[super didMoveToSuperview];
|
||||
[self didMoved];
|
||||
}
|
||||
|
||||
- (void)setCurrentAnimatedImageIndex:(NSUInteger)currentAnimatedImageIndex {
|
||||
if (!_curAnimatedImage) return;
|
||||
if (currentAnimatedImageIndex >= _curAnimatedImage.animatedImageFrameCount) return;
|
||||
if (_curIndex == currentAnimatedImageIndex) return;
|
||||
|
||||
void (^block)() = ^{
|
||||
LOCK(
|
||||
[_requestQueue cancelAllOperations];
|
||||
[_buffer removeAllObjects];
|
||||
[self willChangeValueForKey:@"currentAnimatedImageIndex"];
|
||||
_curIndex = currentAnimatedImageIndex;
|
||||
[self didChangeValueForKey:@"currentAnimatedImageIndex"];
|
||||
_curFrame = [_curAnimatedImage animatedImageFrameAtIndex:_curIndex];
|
||||
if (_curImageHasContentsRect) {
|
||||
_curContentsRect = [_curAnimatedImage animatedImageContentsRectAtIndex:_curIndex];
|
||||
}
|
||||
_time = 0;
|
||||
_loopEnd = NO;
|
||||
_bufferMiss = NO;
|
||||
[self.layer setNeedsDisplay];
|
||||
)//LOCK
|
||||
};
|
||||
|
||||
if (pthread_main_np()) {
|
||||
block();
|
||||
} else {
|
||||
dispatch_async(dispatch_get_main_queue(), block);
|
||||
}
|
||||
}
|
||||
|
||||
- (NSUInteger)currentAnimatedImageIndex {
|
||||
return _curIndex;
|
||||
}
|
||||
|
||||
- (void)setRunloopMode:(NSString *)runloopMode {
|
||||
if ([_runloopMode isEqual:runloopMode]) return;
|
||||
if (_link) {
|
||||
if (_runloopMode) {
|
||||
[_link removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
|
||||
}
|
||||
if (runloopMode.length) {
|
||||
[_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:runloopMode];
|
||||
}
|
||||
}
|
||||
_runloopMode = runloopMode.copy;
|
||||
}
|
||||
|
||||
#pragma mark - Override NSObject(NSKeyValueObservingCustomization)
|
||||
|
||||
+ (BOOL)automaticallyNotifiesObserversForKey:(NSString *)key {
|
||||
if ([key isEqualToString:@"currentAnimatedImageIndex"]) {
|
||||
return NO;
|
||||
}
|
||||
return [super automaticallyNotifiesObserversForKey:key];
|
||||
}
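/*
 Observation sketch (illustrative only; `observer` is hypothetical): automatic KVO
 is disabled above, but `_curIndex` changes are wrapped in manual
 willChangeValueForKey:/didChangeValueForKey: calls, so observers are still notified:

     [imageView addObserver:observer
                 forKeyPath:@"currentAnimatedImageIndex"
                    options:NSKeyValueObservingOptionNew
                    context:NULL];
     // `observer` then receives -observeValueForKeyPath:ofObject:change:context:
     // each time the displayed frame index changes.
 */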
|
||||
|
||||
#pragma mark - NSCoding
|
||||
|
||||
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
|
||||
self = [super initWithCoder:aDecoder];
|
||||
_runloopMode = [aDecoder decodeObjectForKey:@"runloopMode"];
|
||||
if (_runloopMode.length == 0) _runloopMode = NSRunLoopCommonModes;
|
||||
if ([aDecoder containsValueForKey:@"autoPlayAnimatedImage"]) {
|
||||
_autoPlayAnimatedImage = [aDecoder decodeBoolForKey:@"autoPlayAnimatedImage"];
|
||||
} else {
|
||||
_autoPlayAnimatedImage = YES;
|
||||
}
|
||||
|
||||
UIImage *image = [aDecoder decodeObjectForKey:@"YYAnimatedImage"];
|
||||
UIImage *highlightedImage = [aDecoder decodeObjectForKey:@"YYHighlightedAnimatedImage"];
|
||||
if (image) {
|
||||
self.image = image;
|
||||
[self setImage:image withType:YYAnimatedImageTypeImage];
|
||||
}
|
||||
if (highlightedImage) {
|
||||
self.highlightedImage = highlightedImage;
|
||||
[self setImage:highlightedImage withType:YYAnimatedImageTypeHighlightedImage];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)encodeWithCoder:(NSCoder *)aCoder {
|
||||
[super encodeWithCoder:aCoder];
|
||||
[aCoder encodeObject:_runloopMode forKey:@"runloopMode"];
|
||||
[aCoder encodeBool:_autoPlayAnimatedImage forKey:@"autoPlayAnimatedImage"];
|
||||
|
||||
BOOL ani, multi;
|
||||
ani = [self.image conformsToProtocol:@protocol(YYAnimatedImage)];
|
||||
multi = (ani && ((UIImage <YYAnimatedImage> *)self.image).animatedImageFrameCount > 1);
|
||||
if (multi) [aCoder encodeObject:self.image forKey:@"YYAnimatedImage"];
|
||||
|
||||
ani = [self.highlightedImage conformsToProtocol:@protocol(YYAnimatedImage)];
|
||||
multi = (ani && ((UIImage <YYAnimatedImage> *)self.highlightedImage).animatedImageFrameCount > 1);
|
||||
if (multi) [aCoder encodeObject:self.highlightedImage forKey:@"YYHighlightedAnimatedImage"];
|
||||
}
|
||||
|
||||
@end
|
109
Example/Pods/YYImage/YYImage/YYFrameImage.h
generated
Normal file
@@ -0,0 +1,109 @@
|
||||
//
|
||||
// YYFrameImage.h
|
||||
// YYImage <https://github.com/ibireme/YYImage>
|
||||
//
|
||||
// Created by ibireme on 14/12/9.
|
||||
// Copyright (c) 2015 ibireme.
|
||||
//
|
||||
// This source code is licensed under the MIT-style license found in the
|
||||
// LICENSE file in the root directory of this source tree.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
#if __has_include(<YYImage/YYImage.h>)
|
||||
#import <YYImage/YYAnimatedImageView.h>
|
||||
#elif __has_include(<YYWebImage/YYImage.h>)
|
||||
#import <YYWebImage/YYAnimatedImageView.h>
|
||||
#else
|
||||
#import "YYAnimatedImageView.h"
|
||||
#endif
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/**
|
||||
An image to display frame-based animation.
|
||||
|
||||
@discussion It is a fully compatible `UIImage` subclass.
|
||||
It only supports system image formats such as PNG and JPEG.
|
||||
The animation can be played by YYAnimatedImageView.
|
||||
|
||||
Sample Code:
|
||||
|
||||
NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"];
|
||||
NSArray *times = @[@0.1, @0.2, @0.1];
|
||||
YYFrameImage *image = [[YYFrameImage alloc] initWithImagePaths:paths frameDurations:times loopCount:0];
|
||||
YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
|
||||
[view addSubview:imageView];
|
||||
*/
|
||||
@interface YYFrameImage : UIImage <YYAnimatedImage>
|
||||
|
||||
/**
|
||||
Create a frame animated image from files.
|
||||
|
||||
@param paths An array of NSString objects containing the full or
|
||||
partial path to each image file.
|
||||
e.g. @[@"/ani/1.png",@"/ani/2.png",@"/ani/3.png"]
|
||||
|
||||
@param oneFrameDuration The duration (in seconds) per frame.
|
||||
|
||||
@param loopCount The animation loop count, 0 means infinite.
|
||||
|
||||
@return An initialized YYFrameImage object, or nil when an error occurs.
|
||||
*/
|
||||
- (nullable instancetype)initWithImagePaths:(NSArray<NSString *> *)paths
|
||||
oneFrameDuration:(NSTimeInterval)oneFrameDuration
|
||||
loopCount:(NSUInteger)loopCount;
|
||||
|
||||
/**
|
||||
Create a frame animated image from files.
|
||||
|
||||
@param paths An array of NSString objects containing the full or
|
||||
partial path to each image file.
|
||||
e.g. @[@"/ani/frame1.png",@"/ani/frame2.png",@"/ani/frame3.png"]
|
||||
|
||||
@param frameDurations An array of NSNumber objects containing the duration (in seconds) per frame.
|
||||
e.g. @[@0.1, @0.2, @0.3];
|
||||
|
||||
@param loopCount The animation loop count, 0 means infinite.
|
||||
|
||||
@return An initialized YYFrameImage object, or nil when an error occurs.
|
||||
*/
|
||||
- (nullable instancetype)initWithImagePaths:(NSArray<NSString *> *)paths
|
||||
frameDurations:(NSArray<NSNumber *> *)frameDurations
|
||||
loopCount:(NSUInteger)loopCount;
|
||||
|
||||
/**
|
||||
Create a frame animated image from an array of data.
|
||||
|
||||
@param dataArray An array of NSData objects.
|
||||
|
||||
@param oneFrameDuration The duration (in seconds) per frame.
|
||||
|
||||
@param loopCount The animation loop count, 0 means infinite.
|
||||
|
||||
@return An initialized YYFrameImage object, or nil when an error occurs.
|
||||
*/
|
||||
- (nullable instancetype)initWithImageDataArray:(NSArray<NSData *> *)dataArray
|
||||
oneFrameDuration:(NSTimeInterval)oneFrameDuration
|
||||
loopCount:(NSUInteger)loopCount;
|
||||
|
||||
/**
|
||||
Create a frame animated image from an array of data.
|
||||
|
||||
@param dataArray An array of NSData objects.
|
||||
|
||||
@param frameDurations An array of NSNumber objects containing the duration (in seconds) per frame.
|
||||
e.g. @[@0.1, @0.2, @0.3];
|
||||
|
||||
@param loopCount The animation loop count, 0 means infinite.
|
||||
|
||||
@return An initialized YYFrameImage object, or nil when an error occurs.
|
||||
*/
|
||||
- (nullable instancetype)initWithImageDataArray:(NSArray<NSData *> *)dataArray
|
||||
frameDurations:(NSArray *)frameDurations
|
||||
loopCount:(NSUInteger)loopCount;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
150
Example/Pods/YYImage/YYImage/YYFrameImage.m
generated
Normal file
@@ -0,0 +1,150 @@
|
||||
//
|
||||
// YYFrameImage.m
|
||||
// YYImage <https://github.com/ibireme/YYImage>
|
||||
//
|
||||
// Created by ibireme on 14/12/9.
|
||||
// Copyright (c) 2015 ibireme.
|
||||
//
|
||||
// This source code is licensed under the MIT-style license found in the
|
||||
// LICENSE file in the root directory of this source tree.
|
||||
//
|
||||
|
||||
#import "YYFrameImage.h"
|
||||
#import "YYImageCoder.h"
|
||||
|
||||
|
||||
/**
|
||||
Return the path scale.
|
||||
|
||||
e.g.
|
||||
<table>
|
||||
<tr><th>Path </th><th>Scale </th></tr>
|
||||
<tr><td>"icon.png" </td><td>1 </td></tr>
|
||||
<tr><td>"icon@2x.png" </td><td>2 </td></tr>
|
||||
<tr><td>"icon@2.5x.png" </td><td>2.5 </td></tr>
|
||||
<tr><td>"icon@2x" </td><td>1 </td></tr>
|
||||
<tr><td>"icon@2x..png" </td><td>1 </td></tr>
|
||||
<tr><td>"icon@2x.png/" </td><td>1 </td></tr>
|
||||
</table>
|
||||
*/
|
||||
static CGFloat _NSStringPathScale(NSString *string) {
|
||||
if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
|
||||
NSString *name = string.stringByDeletingPathExtension;
|
||||
__block CGFloat scale = 1;
|
||||
|
||||
NSRegularExpression *pattern = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
|
||||
[pattern enumerateMatchesInString:name options:kNilOptions range:NSMakeRange(0, name.length) usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) {
|
||||
if (result.range.location >= 3) {
|
||||
scale = [string substringWithRange:NSMakeRange(result.range.location + 1, result.range.length - 2)].doubleValue;
|
||||
}
|
||||
}];
|
||||
|
||||
return scale;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@implementation YYFrameImage {
|
||||
NSUInteger _loopCount;
|
||||
NSUInteger _oneFrameBytes;
|
||||
NSArray *_imagePaths;
|
||||
NSArray *_imageDatas;
|
||||
NSArray *_frameDurations;
|
||||
}
|
||||
|
||||
- (instancetype)initWithImagePaths:(NSArray *)paths oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount {
|
||||
NSMutableArray *durations = [NSMutableArray new];
|
||||
for (int i = 0, max = (int)paths.count; i < max; i++) {
|
||||
[durations addObject:@(oneFrameDuration)];
|
||||
}
|
||||
return [self initWithImagePaths:paths frameDurations:durations loopCount:loopCount];
|
||||
}
|
||||
|
||||
- (instancetype)initWithImagePaths:(NSArray *)paths frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount {
|
||||
if (paths.count == 0) return nil;
|
||||
if (paths.count != frameDurations.count) return nil;
|
||||
|
||||
NSString *firstPath = paths[0];
|
||||
NSData *firstData = [NSData dataWithContentsOfFile:firstPath];
|
||||
CGFloat scale = _NSStringPathScale(firstPath);
|
||||
UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded];
|
||||
self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp];
|
||||
if (!self) return nil;
|
||||
long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage);
|
||||
_oneFrameBytes = (NSUInteger)frameByte;
|
||||
_imagePaths = paths.copy;
|
||||
_frameDurations = frameDurations.copy;
|
||||
_loopCount = loopCount;
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithImageDataArray:(NSArray *)dataArray oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount {
|
||||
NSMutableArray *durations = [NSMutableArray new];
|
||||
for (int i = 0, max = (int)dataArray.count; i < max; i++) {
|
||||
[durations addObject:@(oneFrameDuration)];
|
||||
}
|
||||
return [self initWithImageDataArray:dataArray frameDurations:durations loopCount:loopCount];
|
||||
}
|
||||
|
||||
- (instancetype)initWithImageDataArray:(NSArray *)dataArray frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount {
|
||||
if (dataArray.count == 0) return nil;
|
||||
if (dataArray.count != frameDurations.count) return nil;
|
||||
|
||||
NSData *firstData = dataArray[0];
|
||||
CGFloat scale = [UIScreen mainScreen].scale;
|
||||
UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded];
|
||||
self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp];
|
||||
if (!self) return nil;
|
||||
long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage);
|
||||
_oneFrameBytes = (NSUInteger)frameByte;
|
||||
_imageDatas = dataArray.copy;
|
||||
_frameDurations = frameDurations.copy;
|
||||
_loopCount = loopCount;
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
#pragma mark - YYAnimatedImage
|
||||
|
||||
- (NSUInteger)animatedImageFrameCount {
|
||||
if (_imagePaths) {
|
||||
return _imagePaths.count;
|
||||
} else if (_imageDatas) {
|
||||
return _imageDatas.count;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
- (NSUInteger)animatedImageLoopCount {
|
||||
return _loopCount;
|
||||
}
|
||||
|
||||
- (NSUInteger)animatedImageBytesPerFrame {
|
||||
return _oneFrameBytes;
|
||||
}
|
||||
|
||||
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
|
||||
if (_imagePaths) {
|
||||
if (index >= _imagePaths.count) return nil;
|
||||
NSString *path = _imagePaths[index];
|
||||
CGFloat scale = _NSStringPathScale(path);
|
||||
NSData *data = [NSData dataWithContentsOfFile:path];
|
||||
return [[UIImage imageWithData:data scale:scale] yy_imageByDecoded];
|
||||
} else if (_imageDatas) {
|
||||
if (index >= _imageDatas.count) return nil;
|
||||
NSData *data = _imageDatas[index];
|
||||
return [[UIImage imageWithData:data scale:[UIScreen mainScreen].scale] yy_imageByDecoded];
|
||||
} else {
|
||||
return index == 0 ? self : nil;
|
||||
}
|
||||
}
|
||||
|
||||
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
|
||||
if (index >= _frameDurations.count) return 0;
|
||||
NSNumber *num = _frameDurations[index];
|
||||
return [num doubleValue];
|
||||
}
|
||||
|
||||
@end
|
92
Example/Pods/YYImage/YYImage/YYImage.h
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
//
|
||||
// YYImage.h
|
||||
// YYImage <https://github.com/ibireme/YYImage>
|
||||
//
|
||||
// Created by ibireme on 14/10/20.
|
||||
// Copyright (c) 2015 ibireme.
|
||||
//
|
||||
// This source code is licensed under the MIT-style license found in the
|
||||
// LICENSE file in the root directory of this source tree.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
#if __has_include(<YYImage/YYImage.h>)
|
||||
FOUNDATION_EXPORT double YYImageVersionNumber;
|
||||
FOUNDATION_EXPORT const unsigned char YYImageVersionString[];
|
||||
#import <YYImage/YYFrameImage.h>
|
||||
#import <YYImage/YYSpriteSheetImage.h>
|
||||
#import <YYImage/YYImageCoder.h>
|
||||
#import <YYImage/YYAnimatedImageView.h>
|
||||
#elif __has_include(<YYWebImage/YYImage.h>)
|
||||
#import <YYWebImage/YYFrameImage.h>
|
||||
#import <YYWebImage/YYSpriteSheetImage.h>
|
||||
#import <YYWebImage/YYImageCoder.h>
|
||||
#import <YYWebImage/YYAnimatedImageView.h>
|
||||
#else
|
||||
#import "YYFrameImage.h"
|
||||
#import "YYSpriteSheetImage.h"
|
||||
#import "YYImageCoder.h"
|
||||
#import "YYAnimatedImageView.h"
|
||||
#endif
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
|
||||
/**
|
||||
A YYImage object is a high-level way to display animated image data.
|
||||
|
||||
@discussion It is a fully compatible `UIImage` subclass. It extends the UIImage
|
||||
to support animated WebP, APNG and GIF format image data decoding. It also
|
||||
supports the NSCoding protocol to archive and unarchive multi-frame image data.
|
||||
|
||||
If the image is created from multi-frame image data, and you want to play the
|
||||
animation, try replacing UIImageView with `YYAnimatedImageView`.
|
||||
|
||||
Sample Code:
|
||||
|
||||
// animation@3x.webp
|
||||
YYImage *image = [YYImage imageNamed:@"animation.webp"];
|
||||
YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
|
||||
[view addSubview:imageView];
|
||||
|
||||
*/
|
||||
@interface YYImage : UIImage <YYAnimatedImage>
|
||||
|
||||
+ (nullable YYImage *)imageNamed:(NSString *)name; // no cache!
|
||||
+ (nullable YYImage *)imageWithContentsOfFile:(NSString *)path;
|
||||
+ (nullable YYImage *)imageWithData:(NSData *)data;
|
||||
+ (nullable YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale;
|
||||
|
||||
/**
|
||||
If the image is created from data or file, then the value indicates the data type.
|
||||
*/
|
||||
@property (nonatomic, readonly) YYImageType animatedImageType;
|
||||
|
||||
/**
|
||||
If the image is created from animated image data (multi-frame GIF/APNG/WebP),
|
||||
this property stores the original image data.
|
||||
*/
|
||||
@property (nullable, nonatomic, readonly) NSData *animatedImageData;
|
||||
|
||||
/**
|
||||
The total memory usage (in bytes) if all frame images was loaded into memory.
|
||||
The value is 0 if the image is not created from a multi-frame image data.
|
||||
*/
|
||||
@property (nonatomic, readonly) NSUInteger animatedImageMemorySize;
|
||||
|
||||
/**
|
||||
Preload all frame image to memory.
|
||||
|
||||
@discussion Set this property to `YES` will block the calling thread to decode
|
||||
all animation frame image to memory, set to `NO` will release the preloaded frames.
|
||||
If the image is shared by lots of image views (such as emoticon), preload all
|
||||
frames will reduce the CPU cost.
|
||||
|
||||
See `animatedImageMemorySize` for memory cost.
|
||||
*/
|
||||
@property (nonatomic) BOOL preloadAllAnimatedImageFrames;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
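A minimal usage sketch of the preload switch documented above; the `emoticon.gif` resource name and the container view are assumptions for illustration:

    // Decode every frame once, up front, so that many views can share one image
    // without repeated per-frame decoding (costs animatedImageMemorySize bytes).
    YYImage *emoticon = [YYImage imageNamed:@"emoticon.gif"];
    emoticon.preloadAllAnimatedImageFrames = YES;
    for (NSUInteger i = 0; i < 10; i++) {
        YYAnimatedImageView *view = [[YYAnimatedImageView alloc] initWithImage:emoticon];
        [containerView addSubview:view];
    }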
254
Example/Pods/YYImage/YYImage/YYImage.m
generated
Normal file
@@ -0,0 +1,254 @@
//
//  YYImage.m
//  YYImage <https://github.com/ibireme/YYImage>
//
//  Created by ibireme on 14/10/20.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYImage.h"

/**
 An array of NSNumber objects, showing the best order for path scale search.
 e.g. iPhone3GS:@[@1,@2,@3]  iPhone5:@[@2,@3,@1]  iPhone6 Plus:@[@3,@2,@1]
 */
static NSArray *_NSBundlePreferredScales() {
    static NSArray *scales;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        CGFloat screenScale = [UIScreen mainScreen].scale;
        if (screenScale <= 1) {
            scales = @[@1,@2,@3];
        } else if (screenScale <= 2) {
            scales = @[@2,@3,@1];
        } else {
            scales = @[@3,@2,@1];
        }
    });
    return scales;
}

/**
 Add a scale modifier to the file name (without path extension),
 from @"name" to @"name@2x".
 
 e.g.
 <table>
 <tr><th>Before     </th><th>After(scale:2)</th></tr>
 <tr><td>"icon"     </td><td>"icon@2x"     </td></tr>
 <tr><td>"icon "    </td><td>"icon @2x"    </td></tr>
 <tr><td>"icon.top" </td><td>"icon.top@2x" </td></tr>
 <tr><td>"/p/name"  </td><td>"/p/name@2x"  </td></tr>
 <tr><td>"/path/"   </td><td>"/path/"      </td></tr>
 </table>
 
 @param scale Resource scale.
 @return String with the scale modifier added, or the original string if it does not end with a file name.
 */
static NSString *_NSStringByAppendingNameScale(NSString *string, CGFloat scale) {
    if (!string) return nil;
    if (fabs(scale - 1) <= __FLT_EPSILON__ || string.length == 0 || [string hasSuffix:@"/"]) return string.copy;
    return [string stringByAppendingFormat:@"@%@x", @(scale)];
}

/**
 Return the path scale.
 
 e.g.
 <table>
 <tr><th>Path            </th><th>Scale </th></tr>
 <tr><td>"icon.png"      </td><td>1     </td></tr>
 <tr><td>"icon@2x.png"   </td><td>2     </td></tr>
 <tr><td>"icon@2.5x.png" </td><td>2.5   </td></tr>
 <tr><td>"icon@2x"       </td><td>1     </td></tr>
 <tr><td>"icon@2x..png"  </td><td>1     </td></tr>
 <tr><td>"icon@2x.png/"  </td><td>1     </td></tr>
 </table>
 */
static CGFloat _NSStringPathScale(NSString *string) {
    if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
    NSString *name = string.stringByDeletingPathExtension;
    __block CGFloat scale = 1;
    
    NSRegularExpression *pattern = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
    [pattern enumerateMatchesInString:name options:kNilOptions range:NSMakeRange(0, name.length) usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) {
        if (result.range.location >= 3) {
            scale = [string substringWithRange:NSMakeRange(result.range.location + 1, result.range.length - 2)].doubleValue;
        }
    }];
    
    return scale;
}


@implementation YYImage {
    YYImageDecoder *_decoder;
    NSArray *_preloadedFrames;
    dispatch_semaphore_t _preloadedLock;
    NSUInteger _bytesPerFrame;
}

+ (YYImage *)imageNamed:(NSString *)name {
    if (name.length == 0) return nil;
    if ([name hasSuffix:@"/"]) return nil;
    
    NSString *res = name.stringByDeletingPathExtension;
    NSString *ext = name.pathExtension;
    NSString *path = nil;
    CGFloat scale = 1;
    
    // If no extension, guess by system supported (same as UIImage).
    NSArray *exts = ext.length > 0 ? @[ext] : @[@"", @"png", @"jpeg", @"jpg", @"gif", @"webp", @"apng"];
    NSArray *scales = _NSBundlePreferredScales();
    for (int s = 0; s < scales.count; s++) {
        scale = ((NSNumber *)scales[s]).floatValue;
        NSString *scaledName = _NSStringByAppendingNameScale(res, scale);
        for (NSString *e in exts) {
            path = [[NSBundle mainBundle] pathForResource:scaledName ofType:e];
            if (path) break;
        }
        if (path) break;
    }
    if (path.length == 0) return nil;
    
    NSData *data = [NSData dataWithContentsOfFile:path];
    if (data.length == 0) return nil;
    
    return [[self alloc] initWithData:data scale:scale];
}

+ (YYImage *)imageWithContentsOfFile:(NSString *)path {
    return [[self alloc] initWithContentsOfFile:path];
}

+ (YYImage *)imageWithData:(NSData *)data {
    return [[self alloc] initWithData:data];
}

+ (YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale {
    return [[self alloc] initWithData:data scale:scale];
}

- (instancetype)initWithContentsOfFile:(NSString *)path {
    NSData *data = [NSData dataWithContentsOfFile:path];
    return [self initWithData:data scale:_NSStringPathScale(path)];
}

- (instancetype)initWithData:(NSData *)data {
    return [self initWithData:data scale:1];
}

- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
    if (data.length == 0) return nil;
    if (scale <= 0) scale = [UIScreen mainScreen].scale;
    _preloadedLock = dispatch_semaphore_create(1);
    @autoreleasepool {
        YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
        YYImageFrame *frame = [decoder frameAtIndex:0 decodeForDisplay:YES];
        UIImage *image = frame.image;
        if (!image) return nil;
        self = [self initWithCGImage:image.CGImage scale:decoder.scale orientation:image.imageOrientation];
        if (!self) return nil;
        _animatedImageType = decoder.type;
        if (decoder.frameCount > 1) {
            _decoder = decoder;
            _bytesPerFrame = CGImageGetBytesPerRow(image.CGImage) * CGImageGetHeight(image.CGImage);
            _animatedImageMemorySize = _bytesPerFrame * decoder.frameCount;
        }
        self.yy_isDecodedForDisplay = YES;
    }
    return self;
}

- (NSData *)animatedImageData {
    return _decoder.data;
}

- (void)setPreloadAllAnimatedImageFrames:(BOOL)preloadAllAnimatedImageFrames {
    if (_preloadAllAnimatedImageFrames != preloadAllAnimatedImageFrames) {
        if (preloadAllAnimatedImageFrames && _decoder.frameCount > 0) {
            NSMutableArray *frames = [NSMutableArray new];
            for (NSUInteger i = 0, max = _decoder.frameCount; i < max; i++) {
                UIImage *img = [self animatedImageFrameAtIndex:i];
                if (img) {
                    [frames addObject:img];
                } else {
                    [frames addObject:[NSNull null]];
                }
            }
            dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
            _preloadedFrames = frames;
            dispatch_semaphore_signal(_preloadedLock);
        } else {
            dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
            _preloadedFrames = nil;
            dispatch_semaphore_signal(_preloadedLock);
        }
    }
}

#pragma mark - protocol NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
    NSNumber *scale = [aDecoder decodeObjectForKey:@"YYImageScale"];
    NSData *data = [aDecoder decodeObjectForKey:@"YYImageData"];
    if (data.length) {
        self = [self initWithData:data scale:scale.doubleValue];
    } else {
        self = [super initWithCoder:aDecoder];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder {
    if (_decoder.data.length) {
        [aCoder encodeObject:@(self.scale) forKey:@"YYImageScale"];
        [aCoder encodeObject:_decoder.data forKey:@"YYImageData"];
    } else {
        [super encodeWithCoder:aCoder]; // Apple uses UIImagePNGRepresentation() to encode UIImage.
    }
}

#pragma mark - protocol YYAnimatedImage

- (NSUInteger)animatedImageFrameCount {
    return _decoder.frameCount;
}

- (NSUInteger)animatedImageLoopCount {
    return _decoder.loopCount;
}

- (NSUInteger)animatedImageBytesPerFrame {
    return _bytesPerFrame;
}

- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    if (index >= _decoder.frameCount) return nil;
    dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
    UIImage *image = _preloadedFrames[index];
    dispatch_semaphore_signal(_preloadedLock);
    if (image) return image == (id)[NSNull null] ? nil : image;
    return [_decoder frameAtIndex:index decodeForDisplay:YES].image;
}

- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    NSTimeInterval duration = [_decoder frameDurationAtIndex:index];
    
    /*
     http://opensource.apple.com/source/WebCore/WebCore-7600.1.25/platform/graphics/cg/ImageSourceCG.cpp
     Many annoying ads specify a 0 duration to make an image flash as quickly as
     possible. We follow Safari and Firefox's behavior and use a duration of 100 ms
     for any frames that specify a duration of <= 10 ms.
     See <rdar://problem/7689300> and <http://webkit.org/b/36082> for more information.
     
     See also: http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser.
     */
    if (duration < 0.011f) return 0.100f;
    return duration;
}

@end
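Because the coder methods above archive the original multi-frame data under the `YYImageScale`/`YYImageData` keys, an animated YYImage survives an archive round trip. A small sketch; the WebP data and file path are assumptions:

    YYImage *image = [YYImage imageWithData:webpData];            // webpData loaded elsewhere
    NSData *archived = [NSKeyedArchiver archivedDataWithRootObject:image];
    [archived writeToFile:@"/tmp/animation.archive" atomically:YES];

    YYImage *restored = [NSKeyedUnarchiver unarchiveObjectWithData:archived];
    // For multi-frame input, restored.animatedImageData matches the original data;
    // single-frame images fall back to UIImage's own PNG-based coding.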
505
Example/Pods/YYImage/YYImage/YYImageCoder.h
generated
Normal file
@@ -0,0 +1,505 @@
//
|
||||
// YYImageCoder.h
|
||||
// YYImage <https://github.com/ibireme/YYImage>
|
||||
//
|
||||
// Created by ibireme on 15/5/13.
|
||||
// Copyright (c) 2015 ibireme.
|
||||
//
|
||||
// This source code is licensed under the MIT-style license found in the
|
||||
// LICENSE file in the root directory of this source tree.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/**
|
||||
Image file type.
|
||||
*/
|
||||
typedef NS_ENUM(NSUInteger, YYImageType) {
|
||||
YYImageTypeUnknown = 0, ///< unknown
|
||||
YYImageTypeJPEG, ///< jpeg, jpg
|
||||
YYImageTypeJPEG2000, ///< jp2
|
||||
YYImageTypeTIFF, ///< tiff, tif
|
||||
YYImageTypeBMP, ///< bmp
|
||||
YYImageTypeICO, ///< ico
|
||||
YYImageTypeICNS, ///< icns
|
||||
YYImageTypeGIF, ///< gif
|
||||
YYImageTypePNG, ///< png
|
||||
YYImageTypeWebP, ///< webp
|
||||
YYImageTypeOther, ///< other image format
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
Dispose method specifies how the area used by the current frame is to be treated
|
||||
before rendering the next frame on the canvas.
|
||||
*/
|
||||
typedef NS_ENUM(NSUInteger, YYImageDisposeMethod) {
|
||||
|
||||
/**
|
||||
No disposal is done on this frame before rendering the next; the contents
|
||||
of the canvas are left as is.
|
||||
*/
|
||||
YYImageDisposeNone = 0,
|
||||
|
||||
/**
|
||||
The frame's region of the canvas is to be cleared to fully transparent black
|
||||
before rendering the next frame.
|
||||
*/
|
||||
YYImageDisposeBackground,
|
||||
|
||||
/**
|
||||
The frame's region of the canvas is to be reverted to the previous contents
|
||||
before rendering the next frame.
|
||||
*/
|
||||
YYImageDisposePrevious,
|
||||
};
|
||||
|
||||
/**
|
||||
Blend operation specifies how transparent pixels of the current frame are
|
||||
blended with those of the previous canvas.
|
||||
*/
|
||||
typedef NS_ENUM(NSUInteger, YYImageBlendOperation) {
|
||||
|
||||
/**
|
||||
All color components of the frame, including alpha, overwrite the current
|
||||
contents of the frame's canvas region.
|
||||
*/
|
||||
YYImageBlendNone = 0,
|
||||
|
||||
/**
|
||||
The frame should be composited onto the output buffer based on its alpha.
|
||||
*/
|
||||
YYImageBlendOver,
|
||||
};
|
||||
|
||||
/**
|
||||
An image frame object.
|
||||
*/
|
||||
@interface YYImageFrame : NSObject <NSCopying>
|
||||
@property (nonatomic) NSUInteger index; ///< Frame index (zero based)
|
||||
@property (nonatomic) NSUInteger width; ///< Frame width
|
||||
@property (nonatomic) NSUInteger height; ///< Frame height
|
||||
@property (nonatomic) NSUInteger offsetX; ///< Frame origin.x in canvas (left-bottom based)
|
||||
@property (nonatomic) NSUInteger offsetY; ///< Frame origin.y in canvas (left-bottom based)
|
||||
@property (nonatomic) NSTimeInterval duration; ///< Frame duration in seconds
|
||||
@property (nonatomic) YYImageDisposeMethod dispose; ///< Frame dispose method.
|
||||
@property (nonatomic) YYImageBlendOperation blend; ///< Frame blend operation.
|
||||
@property (nullable, nonatomic, strong) UIImage *image; ///< The image.
|
||||
+ (instancetype)frameWithImage:(UIImage *)image;
|
||||
@end
|
||||
|
||||
|
||||
#pragma mark - Decoder
|
||||
|
||||
/**
|
||||
An image decoder to decode image data.
|
||||
|
||||
@discussion This class supports decoding animated WebP, APNG, GIF and system
|
||||
image format such as PNG, JPG, JP2, BMP, TIFF, PIC, ICNS and ICO. It can be used
|
||||
to decode complete image data, or to decode incremental image data during image
|
||||
download. This class is thread-safe.
|
||||
|
||||
Example:
|
||||
|
||||
// Decode single image:
|
||||
     NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"];
|
||||
YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
|
||||
     UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
|
||||
|
||||
// Decode image during download:
|
||||
NSMutableData *data = [NSMutableData new];
|
||||
YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0];
|
||||
while(newDataArrived) {
|
||||
[data appendData:newData];
|
||||
[decoder updateData:data final:NO];
|
||||
if (decoder.frameCount > 0) {
|
||||
         UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
|
||||
// progressive display...
|
||||
}
|
||||
}
|
||||
[decoder updateData:data final:YES];
|
||||
     UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
|
||||
// final display...
|
||||
|
||||
*/
|
||||
@interface YYImageDecoder : NSObject
|
||||
|
||||
@property (nullable, nonatomic, readonly) NSData *data; ///< Image data.
|
||||
@property (nonatomic, readonly) YYImageType type; ///< Image data type.
|
||||
@property (nonatomic, readonly) CGFloat scale; ///< Image scale.
|
||||
@property (nonatomic, readonly) NSUInteger frameCount; ///< Image frame count.
|
||||
@property (nonatomic, readonly) NSUInteger loopCount; ///< Image loop count, 0 means infinite.
|
||||
@property (nonatomic, readonly) NSUInteger width; ///< Image canvas width.
|
||||
@property (nonatomic, readonly) NSUInteger height; ///< Image canvas height.
|
||||
@property (nonatomic, readonly, getter=isFinalized) BOOL finalized;
|
||||
|
||||
/**
|
||||
Creates an image decoder.
|
||||
|
||||
@param scale Image's scale.
|
||||
@return An image decoder.
|
||||
*/
|
||||
- (instancetype)initWithScale:(CGFloat)scale NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
/**
|
||||
Updates the incremental image with new data.
|
||||
|
||||
@discussion You can use this method to decode progressive/interlaced/baseline
|
||||
 image when you do not have the complete image data. The `data` is retained by the
 decoder; you should not modify the data on another thread while decoding.
|
||||
|
||||
@param data The data to add to the image decoder. Each time you call this
|
||||
function, the 'data' parameter must contain all of the image file data
|
||||
accumulated so far.
|
||||
|
||||
@param final A value that specifies whether the data is the final set.
|
||||
 Pass YES if it is, NO otherwise. Once the data has been finalized, it cannot
 be updated any further.
|
||||
|
||||
@return Whether succeed.
|
||||
*/
|
||||
- (BOOL)updateData:(nullable NSData *)data final:(BOOL)final;
|
||||
|
||||
/**
|
||||
Convenience method to create a decoder with specified data.
|
||||
@param data Image data.
|
||||
@param scale Image's scale.
|
||||
@return A new decoder, or nil if an error occurs.
|
||||
*/
|
||||
+ (nullable instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale;
|
||||
|
||||
/**
|
||||
Decodes and returns a frame from a specified index.
|
||||
@param index Frame image index (zero-based).
|
||||
@param decodeForDisplay Whether decode the image to memory bitmap for display.
|
||||
 If NO, it will try to return the original frame data without blending.
|
||||
@return A new frame with image, or nil if an error occurs.
|
||||
*/
|
||||
- (nullable YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay;
|
||||
|
||||
/**
|
||||
Returns the frame duration from a specified index.
|
||||
 @param index  Frame image index (zero-based).
|
||||
@return Duration in seconds.
|
||||
*/
|
||||
- (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index;
|
||||
|
||||
/**
|
||||
Returns the frame's properties. See "CGImageProperties.h" in ImageIO.framework
|
||||
for more information.
|
||||
|
||||
@param index Frame image index (zero-based).
|
||||
@return The ImageIO frame property.
|
||||
*/
|
||||
- (nullable NSDictionary *)framePropertiesAtIndex:(NSUInteger)index;
|
||||
|
||||
/**
|
||||
Returns the image's properties. See "CGImageProperties.h" in ImageIO.framework
|
||||
for more information.
|
||||
*/
|
||||
- (nullable NSDictionary *)imageProperties;
|
||||
|
||||
@end
|
||||
|
||||
|
||||
|
||||
#pragma mark - Encoder
|
||||
|
||||
/**
|
||||
An image encoder to encode image to data.
|
||||
|
||||
@discussion It supports encoding single frame image with the type defined in YYImageType.
|
||||
It also supports encoding multi-frame image with GIF, APNG and WebP.
|
||||
|
||||
Example:
|
||||
|
||||
YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG];
|
||||
jpegEncoder.quality = 0.9;
|
||||
[jpegEncoder addImage:image duration:0];
|
||||
     NSData *jpegData = [jpegEncoder encode];
|
||||
|
||||
YYImageEncoder *gifEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeGIF];
|
||||
gifEncoder.loopCount = 5;
|
||||
[gifEncoder addImage:image0 duration:0.1];
|
||||
[gifEncoder addImage:image1 duration:0.15];
|
||||
[gifEncoder addImage:image2 duration:0.2];
|
||||
     NSData *gifData = [gifEncoder encode];
|
||||
|
||||
 @warning It just packs the images together when encoding a multi-frame image. If you
 want to reduce the image file size, try imagemagick/ffmpeg for GIF and WebP,
 and apngasm for APNG.
|
||||
*/
|
||||
@interface YYImageEncoder : NSObject
|
||||
|
||||
@property (nonatomic, readonly) YYImageType type; ///< Image type.
|
||||
@property (nonatomic) NSUInteger loopCount;     ///< Loop count, 0 means infinite, only available for GIF/APNG/WebP.
|
||||
@property (nonatomic) BOOL lossless; ///< Lossless, only available for WebP.
|
||||
@property (nonatomic) CGFloat quality; ///< Compress quality, 0.0~1.0, only available for JPG/JP2/WebP.
|
||||
|
||||
- (instancetype)init UNAVAILABLE_ATTRIBUTE;
|
||||
+ (instancetype)new UNAVAILABLE_ATTRIBUTE;
|
||||
|
||||
/**
|
||||
Create an image encoder with a specified type.
|
||||
@param type Image type.
|
||||
@return A new encoder, or nil if an error occurs.
|
||||
*/
|
||||
- (nullable instancetype)initWithType:(YYImageType)type NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
/**
|
||||
Add an image to encoder.
|
||||
@param image Image.
|
||||
@param duration Image duration for animation. Pass 0 to ignore this parameter.
|
||||
*/
|
||||
- (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration;
|
||||
|
||||
/**
|
||||
Add an image with image data to encoder.
|
||||
@param data Image data.
|
||||
@param duration Image duration for animation. Pass 0 to ignore this parameter.
|
||||
*/
|
||||
- (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration;
|
||||
|
||||
/**
|
||||
Add an image from a file path to encoder.
|
||||
 @param path     Image file path.
|
||||
@param duration Image duration for animation. Pass 0 to ignore this parameter.
|
||||
*/
|
||||
- (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration;
|
||||
|
||||
/**
|
||||
Encodes the image and returns the image data.
|
||||
@return The image data, or nil if an error occurs.
|
||||
*/
|
||||
- (nullable NSData *)encode;
|
||||
|
||||
/**
|
||||
Encodes the image to a file.
|
||||
@param path The file path (overwrite if exist).
|
||||
@return Whether succeed.
|
||||
*/
|
||||
- (BOOL)encodeToFile:(NSString *)path;
|
||||
|
||||
/**
|
||||
Convenience method to encode single frame image.
|
||||
@param image The image.
|
||||
@param type The destination image type.
|
||||
@param quality Image quality, 0.0~1.0.
|
||||
@return The image data, or nil if an error occurs.
|
||||
*/
|
||||
+ (nullable NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality;
|
||||
|
||||
/**
|
||||
Convenience method to encode image from a decoder.
|
||||
@param decoder The image decoder.
|
||||
@param type The destination image type;
|
||||
@param quality Image quality, 0.0~1.0.
|
||||
@return The image data, or nil if an error occurs.
|
||||
*/
|
||||
+ (nullable NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality;
|
||||
|
||||
@end
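A short sketch of writing an animated image straight to disk with the encoder interface above; the frame images and output path are assumptions:

    YYImageEncoder *webpEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP];
    webpEncoder.loopCount = 0;   // loop forever
    webpEncoder.lossless = NO;
    webpEncoder.quality = 0.8;
    [webpEncoder addImage:frame0 duration:0.1];
    [webpEncoder addImage:frame1 duration:0.1];
    BOOL ok = [webpEncoder encodeToFile:@"/tmp/animation.webp"]; // overwrites an existing file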
|
||||
|
||||
|
||||
#pragma mark - UIImage
|
||||
|
||||
@interface UIImage (YYImageCoder)
|
||||
|
||||
/**
|
||||
Decompress this image to bitmap, so when the image is displayed on screen,
|
||||
the main thread won't be blocked by additional decode. If the image has already
|
||||
been decoded or unable to decode, it just returns itself.
|
||||
|
||||
 @return A decoded image, or the receiver itself if decoding is not needed.
|
||||
@see yy_isDecodedForDisplay
|
||||
*/
|
||||
- (instancetype)yy_imageByDecoded;
|
||||
|
||||
/**
|
||||
 Whether the image can be displayed on screen without additional decoding.
|
||||
 @warning It is just a hint for your code; changing it has no other effect.
|
||||
*/
|
||||
@property (nonatomic) BOOL yy_isDecodedForDisplay;
|
||||
|
||||
/**
|
||||
Saves this image to iOS Photos Album.
|
||||
|
||||
@discussion This method attempts to save the original data to album if the
|
||||
image is created from an animated GIF/APNG, otherwise, it will save the image
|
||||
as JPEG or PNG (based on the alpha information).
|
||||
|
||||
@param completionBlock The block invoked (in main thread) after the save operation completes.
|
||||
assetURL: An URL that identifies the saved image file. If the image is not saved, assetURL is nil.
|
||||
error: If the image is not saved, an error object that describes the reason for failure, otherwise nil.
|
||||
*/
|
||||
- (void)yy_saveToAlbumWithCompletionBlock:(nullable void(^)(NSURL * _Nullable assetURL, NSError * _Nullable error))completionBlock;
|
||||
|
||||
/**
|
||||
Return a 'best' data representation for this image.
|
||||
|
||||
 @discussion The conversion is based on these rules:
|
||||
1. If the image is created from an animated GIF/APNG/WebP, it returns the original data.
|
||||
2. It returns PNG or JPEG(0.9) representation based on the alpha information.
|
||||
|
||||
@return Image data, or nil if an error occurs.
|
||||
*/
|
||||
- (nullable NSData *)yy_imageDataRepresentation;
|
||||
|
||||
@end
|
||||
|
||||
|
||||
|
||||
#pragma mark - Helper
|
||||
|
||||
/// Detect a data's image type by reading the data's header 16 bytes (very fast).
|
||||
CG_EXTERN YYImageType YYImageDetectType(CFDataRef data);
|
||||
|
||||
/// Convert YYImageType to UTI (such as kUTTypeJPEG).
|
||||
CG_EXTERN CFStringRef _Nullable YYImageTypeToUTType(YYImageType type);
|
||||
|
||||
/// Convert UTI (such as kUTTypeJPEG) to YYImageType.
|
||||
CG_EXTERN YYImageType YYImageTypeFromUTType(CFStringRef uti);
|
||||
|
||||
/// Get image type's file extension (such as @"jpg").
|
||||
CG_EXTERN NSString *_Nullable YYImageTypeGetExtension(YYImageType type);
|
||||
|
||||
|
||||
|
||||
/// Returns the shared DeviceRGB color space.
|
||||
CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceRGB();
|
||||
|
||||
/// Returns the shared DeviceGray color space.
|
||||
CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceGray();
|
||||
|
||||
/// Returns whether a color space is DeviceRGB.
|
||||
CG_EXTERN BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space);
|
||||
|
||||
/// Returns whether a color space is DeviceGray.
|
||||
CG_EXTERN BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space);
|
||||
|
||||
|
||||
|
||||
/// Convert EXIF orientation value to UIImageOrientation.
|
||||
CG_EXTERN UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value);
|
||||
|
||||
/// Convert UIImageOrientation to EXIF orientation value.
|
||||
CG_EXTERN NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation);
|
||||
|
||||
|
||||
|
||||
/**
|
||||
Create a decoded image.
|
||||
|
||||
@discussion If the source image is created from a compressed image data (such as
|
||||
PNG or JPEG), you can use this method to decode the image. After decoded, you can
|
||||
access the decoded bytes with CGImageGetDataProvider() and CGDataProviderCopyData()
|
||||
without additional decode process. If the image has already decoded, this method
|
||||
just copy the decoded bytes to the new image.
|
||||
|
||||
@param imageRef The source image.
|
||||
@param decodeForDisplay If YES, this method will decode the image and convert
|
||||
it to BGRA8888 (premultiplied) or BGRX8888 format for CALayer display.
|
||||
|
||||
@return A decoded image, or NULL if an error occurs.
|
||||
*/
|
||||
CG_EXTERN CGImageRef _Nullable YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay);
|
||||
|
||||
/**
|
||||
Create an image copy with an orientation.
|
||||
|
||||
@param imageRef Source image
|
||||
@param orientation Image orientation which will applied to the image.
|
||||
 @param destBitmapInfo Destination image bitmap info; only 32-bit formats (such as ARGB8888) are supported.
|
||||
@return A new image, or NULL if an error occurs.
|
||||
*/
|
||||
CG_EXTERN CGImageRef _Nullable YYCGImageCreateCopyWithOrientation(CGImageRef imageRef,
|
||||
UIImageOrientation orientation,
|
||||
CGBitmapInfo destBitmapInfo);
|
||||
|
||||
/**
|
||||
Create an image copy with CGAffineTransform.
|
||||
|
||||
@param imageRef Source image.
|
||||
@param transform Transform applied to image (left-bottom based coordinate system).
|
||||
@param destSize Destination image size
|
||||
 @param destBitmapInfo Destination image bitmap info; only 32-bit formats (such as ARGB8888) are supported.
|
||||
@return A new image, or NULL if an error occurs.
|
||||
*/
|
||||
CG_EXTERN CGImageRef _Nullable YYCGImageCreateAffineTransformCopy(CGImageRef imageRef,
|
||||
CGAffineTransform transform,
|
||||
CGSize destSize,
|
||||
CGBitmapInfo destBitmapInfo);
|
||||
|
||||
/**
|
||||
Encode an image to data with CGImageDestination.
|
||||
|
||||
@param imageRef The image.
|
||||
@param type The image destination data type.
|
||||
@param quality The quality (0.0~1.0)
|
||||
@return A new image data, or nil if an error occurs.
|
||||
*/
|
||||
CG_EXTERN CFDataRef _Nullable YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality);
|
||||
|
||||
|
||||
/**
|
||||
Whether WebP is available in YYImage.
|
||||
*/
|
||||
CG_EXTERN BOOL YYImageWebPAvailable();
|
||||
|
||||
/**
|
||||
 Get a WebP image frame count.
|
||||
|
||||
@param webpData WebP data.
|
||||
@return Image frame count, or 0 if an error occurs.
|
||||
*/
|
||||
CG_EXTERN NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData);
|
||||
|
||||
/**
|
||||
Decode an image from WebP data, returns NULL if an error occurs.
|
||||
|
||||
@param webpData The WebP data.
|
||||
@param decodeForDisplay If YES, this method will decode the image and convert it
|
||||
to BGRA8888 (premultiplied) format for CALayer display.
|
||||
@param useThreads YES to enable multi-thread decode.
|
||||
(speed up, but cost more CPU)
|
||||
@param bypassFiltering YES to skip the in-loop filtering.
|
||||
                            (speed up, but may lose some smoothness)
|
||||
@param noFancyUpsampling YES to use faster pointwise upsampler.
|
||||
(speed down, and may lose some details).
|
||||
@return The decoded image, or NULL if an error occurs.
|
||||
*/
|
||||
CG_EXTERN CGImageRef _Nullable YYCGImageCreateWithWebPData(CFDataRef webpData,
|
||||
BOOL decodeForDisplay,
|
||||
BOOL useThreads,
|
||||
BOOL bypassFiltering,
|
||||
BOOL noFancyUpsampling);
|
||||
|
||||
typedef NS_ENUM(NSUInteger, YYImagePreset) {
|
||||
YYImagePresetDefault = 0, ///< default preset.
|
||||
YYImagePresetPicture, ///< digital picture, like portrait, inner shot
|
||||
YYImagePresetPhoto, ///< outdoor photograph, with natural lighting
|
||||
YYImagePresetDrawing, ///< hand or line drawing, with high-contrast details
|
||||
YYImagePresetIcon, ///< small-sized colorful images
|
||||
YYImagePresetText ///< text-like
|
||||
};
|
||||
|
||||
/**
|
||||
Encode a CGImage to WebP data
|
||||
|
||||
@param imageRef image
|
||||
@param lossless YES=lossless (similar to PNG), NO=lossy (similar to JPEG)
|
||||
@param quality 0.0~1.0 (0=smallest file, 1.0=biggest file)
|
||||
For lossless image, try the value near 1.0; for lossy, try the value near 0.8.
|
||||
@param compressLevel 0~6 (0=fast, 6=slower-better). Default is 4.
|
||||
@param preset Preset for different image type, default is YYImagePresetDefault.
|
||||
@return WebP data, or nil if an error occurs.
|
||||
*/
|
||||
CG_EXTERN CFDataRef _Nullable YYCGImageCreateEncodedWebPData(CGImageRef imageRef,
|
||||
BOOL lossless,
|
||||
CGFloat quality,
|
||||
int compressLevel,
|
||||
YYImagePreset preset);
|
||||
|
||||
NS_ASSUME_NONNULL_END
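The helper functions above make sniffing a payload's type cheap; a brief sketch, with the file path as an assumption:

    NSData *payload = [NSData dataWithContentsOfFile:@"/tmp/downloaded.bin"];
    YYImageType type = YYImageDetectType((__bridge CFDataRef)payload); // reads the first 16 bytes
    if (type == YYImageTypeWebP && !YYImageWebPAvailable()) {
        NSLog(@"WebP data, but WebP support is not compiled into YYImage");
    } else {
        NSLog(@"detected extension: %@", YYImageTypeGetExtension(type)); // e.g. "jpg"
    }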
2870
Example/Pods/YYImage/YYImage/YYImageCoder.m
generated
Normal file
File diff suppressed because it is too large
104
Example/Pods/YYImage/YYImage/YYSpriteSheetImage.h
generated
Normal file
@@ -0,0 +1,104 @@
//
//  YYSpriteImage.h
//  YYImage <https://github.com/ibireme/YYImage>
//
//  Created by ibireme on 15/4/21.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import <UIKit/UIKit.h>

#if __has_include(<YYImage/YYImage.h>)
#import <YYImage/YYAnimatedImageView.h>
#elif __has_include(<YYWebImage/YYImage.h>)
#import <YYWebImage/YYAnimatedImageView.h>
#else
#import "YYAnimatedImageView.h"
#endif

NS_ASSUME_NONNULL_BEGIN

/**
 An image to display sprite sheet animation.
 
 @discussion It is a fully compatible `UIImage` subclass.
 The animation can be played by YYAnimatedImageView.
 
 Sample Code:
 
    // 8 * 12 sprites in a single sheet image
    UIImage *img = [UIImage imageNamed:@"sprite-sheet"];
    NSMutableArray *contentRects = [NSMutableArray new];
    NSMutableArray *durations = [NSMutableArray new];
    for (int j = 0; j < 12; j++) {
        for (int i = 0; i < 8; i++) {
            CGRect rect;
            rect.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
            rect.origin.x = img.size.width / 8 * i;
            rect.origin.y = img.size.height / 12 * j;
            [contentRects addObject:[NSValue valueWithCGRect:rect]];
            [durations addObject:@(1 / 60.0)];
        }
    }
    YYSpriteSheetImage *sprite;
    sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:img
                                                     contentRects:contentRects
                                                   frameDurations:durations
                                                        loopCount:0];
    YYAnimatedImageView *imgView = [YYAnimatedImageView new];
    imgView.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
    imgView.image = sprite;
 
 
 @discussion It can also be used to display a single frame of a sprite sheet image.
 Sample Code:
 
     YYSpriteSheetImage *sheet = ...;
     UIImageView *imageView = ...;
     imageView.image = sheet;
     imageView.layer.contentsRect = [sheet contentsRectForCALayerAtIndex:6];
 
 */
@interface YYSpriteSheetImage : UIImage <YYAnimatedImage>

/**
 Creates and returns an image object.
 
 @param image          The sprite sheet image (contains all frames).
 
 @param contentRects   The sprite sheet image frame rects in the image coordinates.
     The rectangles should not be outside the image's bounds. The objects in this array
     should be created with [NSValue valueWithCGRect:].
 
 @param frameDurations The sprite sheet image frame's durations in seconds.
     The objects in this array should be NSNumber.
 
 @param loopCount      Animation loop count, 0 means infinite looping.
 
 @return An image object, or nil if an error occurs.
 */
- (nullable instancetype)initWithSpriteSheetImage:(UIImage *)image
                                     contentRects:(NSArray<NSValue *> *)contentRects
                                   frameDurations:(NSArray<NSNumber *> *)frameDurations
                                        loopCount:(NSUInteger)loopCount;

@property (nonatomic, readonly) NSArray<NSValue *> *contentRects;
@property (nonatomic, readonly) NSArray<NSValue *> *frameDurations;
@property (nonatomic, readonly) NSUInteger loopCount;

/**
 Get the contents rect for CALayer.
 See the "contentsRect" property in CALayer for more information.
 
 @param index Index of frame.
 @return Contents rect.
 */
- (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index;

@end

NS_ASSUME_NONNULL_END
80
Example/Pods/YYImage/YYImage/YYSpriteSheetImage.m
generated
Normal file
@@ -0,0 +1,80 @@
//
//  YYSpriteImage.m
//  YYImage <https://github.com/ibireme/YYImage>
//
//  Created by ibireme on 15/4/21.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYSpriteSheetImage.h"

@implementation YYSpriteSheetImage

- (instancetype)initWithSpriteSheetImage:(UIImage *)image
                            contentRects:(NSArray *)contentRects
                          frameDurations:(NSArray *)frameDurations
                               loopCount:(NSUInteger)loopCount {
    if (!image.CGImage) return nil;
    if (contentRects.count < 1 || frameDurations.count < 1) return nil;
    if (contentRects.count != frameDurations.count) return nil;
    
    self = [super initWithCGImage:image.CGImage scale:image.scale orientation:image.imageOrientation];
    if (!self) return nil;
    
    _contentRects = contentRects.copy;
    _frameDurations = frameDurations.copy;
    _loopCount = loopCount;
    return self;
}

- (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index {
    CGRect layerRect = CGRectMake(0, 0, 1, 1);
    if (index >= _contentRects.count) return layerRect;
    
    CGSize imageSize = self.size;
    CGRect rect = [self animatedImageContentsRectAtIndex:index];
    if (imageSize.width > 0.01 && imageSize.height > 0.01) {
        layerRect.origin.x = rect.origin.x / imageSize.width;
        layerRect.origin.y = rect.origin.y / imageSize.height;
        layerRect.size.width = rect.size.width / imageSize.width;
        layerRect.size.height = rect.size.height / imageSize.height;
        layerRect = CGRectIntersection(layerRect, CGRectMake(0, 0, 1, 1));
        if (CGRectIsNull(layerRect) || CGRectIsEmpty(layerRect)) {
            layerRect = CGRectMake(0, 0, 1, 1);
        }
    }
    return layerRect;
}

#pragma mark @protocol YYAnimatedImage

- (NSUInteger)animatedImageFrameCount {
    return _contentRects.count;
}

- (NSUInteger)animatedImageLoopCount {
    return _loopCount;
}

- (NSUInteger)animatedImageBytesPerFrame {
    return 0;
}

- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    return self;
}

- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    if (index >= _frameDurations.count) return 0;
    return ((NSNumber *)_frameDurations[index]).doubleValue;
}

- (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index {
    if (index >= _contentRects.count) return CGRectZero;
    return ((NSValue *)_contentRects[index]).CGRectValue;
}

@end
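As a concrete check of `contentsRectForCALayerAtIndex:` above: for an 800x600 sheet whose frame rect at index 6 is (200, 0, 100, 150), the method returns the unit-space rect (0.25, 0, 0.125, 0.25). The sheet and image view below are assumptions:

    // x: 200/800 = 0.25, y: 0/600 = 0, width: 100/800 = 0.125, height: 150/600 = 0.25
    imageView.layer.contentsRect = [sheet contentsRectForCALayerAtIndex:6];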