text stringlengths 9 39.2M | dir stringlengths 25 226 | lang stringclasses 163 values | created_date timestamp[s] | updated_date timestamp[s] | repo_name stringclasses 751 values | repo_full_name stringclasses 752 values | star int64 1.01k 183k | len_tokens int64 1 18.5M |
|---|---|---|---|---|---|---|---|---|
```objective-c
#import "GPUImageFilter.h"

/** Zoom (radial) blur filter. The blur is centered on blurCenter and its
 strength is scaled by blurSize.
 */
@interface GPUImageZoomBlurFilter : GPUImageFilter

/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;

/** The normalized center of the blur. (0.5, 0.5) by default
 */
@property (readwrite, nonatomic) CGPoint blurCenter;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 92 |
```objective-c
#import "GPUImageHalftoneFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (precision qualifiers required).
//
// Halftone fragment shader: the image is partitioned into square cells of
// width fractionalWidthOfPixel; each cell samples its center color once and
// draws a black dot whose radius grows with the sample's darkness
// (1.0 - dot(color, W), where W holds Rec. 709 luma weights).
//
// NOTE(review): the local `dotScaling` declared inside main() shadows the
// uniform of the same name, so the uniform's value is never read — confirm
// whether the uniform declaration is vestigial.
NSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float fractionalWidthOfPixel;
uniform highp float aspectRatio;
uniform highp float dotScaling;
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
lowp vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb;
highp float dotScaling = 1.0 - dot(sampledColor, W);
lowp float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);
}
);
#else
// Desktop OpenGL variant: identical logic, no precision qualifiers.
NSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float fractionalWidthOfPixel;
uniform float aspectRatio;
uniform float dotScaling;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb;
float dotScaling = 1.0 - dot(sampledColor, W);
float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);
}
);
#endif
@implementation GPUImageHalftoneFilter

// Sets up the filter with the halftone dot-pattern fragment shader and a
// default dot pitch of 1% of the image width.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageHalftoneFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    self.fractionalWidthOfAPixel = 0.01;

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 753 |
```objective-c
#import "GPUImageFramebuffer.h"
#import "GPUImageOutput.h"
@interface GPUImageFramebuffer()
{
GLuint framebuffer; // GL framebuffer object name; 0 when only a texture was generated
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
CVPixelBufferRef renderTarget; // pixel buffer backing the texture-cache texture
CVOpenGLESTextureRef renderTexture; // texture-cache texture wrapping renderTarget
NSUInteger readLockCount; // nesting depth of CVPixelBuffer base-address locks
#else
#endif
NSUInteger framebufferReferenceCount; // lock/unlock count; at 0 the buffer returns to the shared cache
BOOL referenceCountingDisabled; // YES for externally managed textures
}

// Creation/teardown helpers; all dispatch to (or expect) the video processing queue's GL context.
- (void)generateFramebuffer;
- (void)generateTexture;
- (void)destroyFramebuffer;
@end

// CGDataProvider release callbacks used by -newCGImageFromFramebufferContents.
void dataProviderReleaseCallback (void *info, const void *data, size_t size);
void dataProviderUnlockCallback (void *info, const void *data, size_t size);
@implementation GPUImageFramebuffer

@synthesize size = _size;
@synthesize textureOptions = _textureOptions;
// Explicit synthesis keeps the _texture ivar even though -texture has a
// custom getter below (which would otherwise suppress auto-synthesis).
@synthesize texture = _texture;
@synthesize missingFramebuffer = _missingFramebuffer;
#pragma mark -
#pragma mark Initialization and teardown
// Designated-style initializer. When onlyGenerateTexture is YES, only a GL
// texture is created (framebuffer stays 0); otherwise a full FBO with a
// backing texture is generated.
- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    _textureOptions = fboTextureOptions;
    _size = framebufferSize;
    framebufferReferenceCount = 0;
    referenceCountingDisabled = NO;
    _missingFramebuffer = onlyGenerateTexture;

    if (!_missingFramebuffer)
    {
        // Full framebuffer: FBO + texture creation is handled (and queued) by the helper.
        [self generateFramebuffer];
    }
    else
    {
        // Texture-only: create just the texture on the video processing queue.
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];
            [self generateTexture];
            framebuffer = 0;
        });
    }

    return self;
}
// Wraps an externally created GL texture. Reference counting is disabled,
// since this object did not allocate the texture via the framebuffer cache.
- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Standard texture settings: linear filtering, clamped edges, 8-bit BGRA.
    GPUTextureOptions overrideTextureOptions;
    overrideTextureOptions.minFilter = GL_LINEAR;
    overrideTextureOptions.magFilter = GL_LINEAR;
    overrideTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
    overrideTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
    overrideTextureOptions.internalFormat = GL_RGBA;
    overrideTextureOptions.format = GL_BGRA;
    overrideTextureOptions.type = GL_UNSIGNED_BYTE;

    _textureOptions = overrideTextureOptions;
    _size = framebufferSize;
    framebufferReferenceCount = 0;
    referenceCountingDisabled = YES;
    _texture = inputTexture;

    return self;
}
// Convenience initializer: a full FBO with the standard texture settings
// (linear filtering, clamped edges, 8-bit BGRA).
- (id)initWithSize:(CGSize)framebufferSize;
{
    GPUTextureOptions defaultTextureOptions;
    defaultTextureOptions.minFilter = GL_LINEAR;
    defaultTextureOptions.magFilter = GL_LINEAR;
    defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
    defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
    defaultTextureOptions.internalFormat = GL_RGBA;
    defaultTextureOptions.format = GL_BGRA;
    defaultTextureOptions.type = GL_UNSIGNED_BYTE;

    return [self initWithSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:NO];
}
// GL/CoreVideo resources must be torn down with the image processing context
// current; -destroyFramebuffer dispatches to the video queue synchronously.
- (void)dealloc
{
[self destroyFramebuffer];
}
#pragma mark -
#pragma mark Internal
// Creates and configures the backing texture (no storage allocated yet).
// Caller is responsible for having the image processing context current.
- (void)generateTexture;
{
glActiveTexture(GL_TEXTURE1); // NOTE(review): unit 1 appears conventional here — confirm against callers
glGenTextures(1, &_texture);
glBindTexture(GL_TEXTURE_2D, _texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, _textureOptions.minFilter);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, _textureOptions.magFilter);
// This is necessary for non-power-of-two textures
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);
// TODO: Handle mipmaps
}
// Creates the FBO and its color-attachment texture on the video processing
// queue. With fast texture upload (iOS device), the texture is backed by an
// IOSurface-based CVPixelBuffer through the Core Video texture cache, so
// rendered bytes can later be read without glReadPixels; otherwise a plain
// GL texture is allocated.
- (void)generateFramebuffer;
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];

glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);

// By default, all framebuffers on iOS 5.0+ devices are backed by texture caches, using one shared cache
if ([GPUImageContext supportsFastTextureUpload])
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
CVOpenGLESTextureCacheRef coreVideoTextureCache = [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache];

// Build the pixel buffer attributes; an (empty) IOSurface properties
// dictionary is required for the buffer to be IOSurface-backed.
CFDictionaryRef empty; // empty value for attr value.
CFMutableDictionaryRef attrs;
empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary
attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty);

CVReturn err = CVPixelBufferCreate(kCFAllocatorDefault, (int)_size.width, (int)_size.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget);
if (err)
{
NSLog(@"FBO size: %f, %f", _size.width, _size.height);
NSAssert(NO, @"Error at CVPixelBufferCreate %d", err);
}

// Wrap the pixel buffer in a GL texture via the shared texture cache.
err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
NULL, // texture attributes
GL_TEXTURE_2D,
_textureOptions.internalFormat, // opengl format
(int)_size.width,
(int)_size.height,
_textureOptions.format, // native iOS format
_textureOptions.type,
0,
&renderTexture);
if (err)
{
NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}

CFRelease(attrs);
CFRelease(empty);

glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
_texture = CVOpenGLESTextureGetName(renderTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);

glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
#endif
}
else
{
// No texture cache: allocate storage with glTexImage2D and attach.
[self generateTexture];

glBindTexture(GL_TEXTURE_2D, _texture);
glTexImage2D(GL_TEXTURE_2D, 0, _textureOptions.internalFormat, (int)_size.width, (int)_size.height, 0, _textureOptions.format, _textureOptions.type, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _texture, 0);
}

#ifndef NS_BLOCK_ASSERTIONS
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
#endif

glBindTexture(GL_TEXTURE_2D, 0);
});
}
// Releases the FBO and its backing storage on the video processing queue.
// Texture-cache textures are released through their CF objects; plain GL
// textures (including the texture-only case) are deleted explicitly.
- (void)destroyFramebuffer;
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];

if (framebuffer)
{
glDeleteFramebuffers(1, &framebuffer);
framebuffer = 0;
}

if ([GPUImageContext supportsFastTextureUpload] && (!_missingFramebuffer))
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
if (renderTarget)
{
CFRelease(renderTarget);
renderTarget = NULL;
}

if (renderTexture)
{
CFRelease(renderTexture);
renderTexture = NULL;
}
#endif
}
else
{
// Texture was created by -generateTexture; delete it directly.
glDeleteTextures(1, &_texture);
}
});
}
#pragma mark -
#pragma mark Usage
// Binds this FBO as the render target and sizes the viewport to match.
// Must be called with the image processing context current.
- (void)activateFramebuffer;
{
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glViewport(0, 0, (int)_size.width, (int)_size.height);
}
#pragma mark -
#pragma mark Reference counting
// Takes one reference on this framebuffer. A no-op when reference counting
// has been disabled (externally managed textures).
- (void)lock;
{
    if (!referenceCountingDisabled)
    {
        framebufferReferenceCount++;
    }
}
// Drops one reference. When the count reaches zero, the framebuffer is
// handed back to the shared cache for reuse. Asserts on over-release.
- (void)unlock;
{
    if (referenceCountingDisabled)
    {
        return;
    }

    NSAssert(framebufferReferenceCount > 0, @"Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?");

    if (--framebufferReferenceCount < 1)
    {
        [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
    }
}
// Forcibly zeroes the reference count without returning the framebuffer to
// the cache; outstanding locks are simply discarded.
- (void)clearAllLocks;
{
framebufferReferenceCount = 0;
}
// Makes -lock / -unlock no-ops from this point on.
- (void)disableReferenceCounting;
{
referenceCountingDisabled = YES;
}
// Re-enables -lock / -unlock reference counting.
- (void)enableReferenceCounting;
{
referenceCountingDisabled = NO;
}
#pragma mark -
#pragma mark Image capture
// CGDataProvider release callback for the glReadPixels path: the provider
// owned a malloc'd copy of the pixels, so just free it.
void dataProviderReleaseCallback (void *info, const void *data, size_t size)
{
free((void *)data);
}
// CGDataProvider release callback for the texture-cache path. `info` carries
// a +1 reference to the framebuffer (created with __bridge_retained in
// -newCGImageFromFramebufferContents); __bridge_transfer balances it here.
void dataProviderUnlockCallback (void *info, const void *data, size_t size)
{
GPUImageFramebuffer *framebuffer = (__bridge_transfer GPUImageFramebuffer*)info;

[framebuffer restoreRenderTarget]; // drops the read lock and the CFRetain on renderTarget
[framebuffer unlock];
[[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:framebuffer];
}
// Returns a new CGImage (caller releases) with this framebuffer's contents.
// Texture-cache path: the CGImage reads the CVPixelBuffer's bytes in place,
// so the pixel buffer and this framebuffer are kept alive (CFRetain +
// __bridge_retained + active-capture list) until dataProviderUnlockCallback
// fires. Fallback path: glReadPixels into a malloc'd buffer owned by the
// data provider.
- (CGImageRef)newCGImageFromFramebufferContents;
{
// a CGImage can only be created from a 'normal' color texture
NSAssert(self.textureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.");
NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE.");

__block CGImageRef cgImageFromBytes;

runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];

NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;
// It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache

GLubyte *rawImagePixels;

CGDataProviderRef dataProvider = NULL;
if ([GPUImageContext supportsFastTextureUpload])
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSUInteger paddedWidthOfImage = CVPixelBufferGetBytesPerRow(renderTarget) / 4.0;
NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;

glFinish(); // ensure the GPU has finished writing before reading the pixel buffer
CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation
[self lockForReading];
rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
[[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
#else
#endif
}
else
{
[self activateFramebuffer];
rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
[self unlock]; // Don't need to keep this around anymore
}

CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();

if ([GPUImageContext supportsFastTextureUpload])
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// BGRA little-endian layout from the pixel buffer; row stride may be padded.
cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
#else
#endif
}
else
{
// Tightly packed RGBA from glReadPixels.
cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
}

// Capture image with current device orientation
CGDataProviderRelease(dataProvider);
CGColorSpaceRelease(defaultRGBColorSpace);
});

return cgImageFromBytes;
}
// Balances the lockForReading + CFRetain(renderTarget) pair taken in
// -newCGImageFromFramebufferContents once the CGImage no longer needs the bytes.
- (void)restoreRenderTarget;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[self unlockAfterReading];
CFRelease(renderTarget);
#else
#endif
}
#pragma mark -
#pragma mark Raw data bytes
// Locks the pixel buffer's base address for CPU reads. Lock calls nest via
// readLockCount; only the first actually locks.
- (void)lockForReading
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
if ([GPUImageContext supportsFastTextureUpload])
{
if (readLockCount == 0)
{
CVPixelBufferLockBaseAddress(renderTarget, 0);
}
readLockCount++;
}
#endif
}
// Releases one nested read lock; the pixel buffer is actually unlocked only
// when the count returns to zero. Asserts on unbalanced calls.
- (void)unlockAfterReading
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
if ([GPUImageContext supportsFastTextureUpload])
{
NSAssert(readLockCount > 0, @"Unbalanced call to -[GPUImageFramebuffer unlockAfterReading]");
readLockCount--;
if (readLockCount == 0)
{
CVPixelBufferUnlockBaseAddress(renderTarget, 0);
}
}
#endif
}
// Row stride of the backing store in bytes: the CVPixelBuffer's (possibly
// padded) stride on the texture-cache path, otherwise a tightly packed
// 4 bytes per pixel.
- (NSUInteger)bytesPerRow;
{
if ([GPUImageContext supportsFastTextureUpload])
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
return CVPixelBufferGetBytesPerRow(renderTarget);
#else
return _size.width * 4; // TODO: do more with this on the non-texture-cache side
#endif
}
else
{
return _size.width * 4;
}
}
// Pointer to the framebuffer's raw bytes (texture-cache path only).
// NOTE(review): the lock is released before returning, so the pointer is
// only safe while the pixel buffer stays alive — confirm callers' usage.
- (GLubyte *)byteBuffer;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[self lockForReading];
GLubyte * bufferBytes = CVPixelBufferGetBaseAddress(renderTarget);
[self unlockAfterReading];
return bufferBytes;
#else
return NULL; // TODO: do more with this on the non-texture-cache side
#endif
}
// GL name of the backing texture.
- (GLuint)texture;
{
//    NSLog(@"Accessing texture: %d from FB: %@", _texture, self);
return _texture;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFramebuffer.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 3,230 |
```objective-c
#import "GPUImageGaussianBlurFilter.h"
@implementation GPUImageGaussianBlurFilter

// Explicit synthesis: each of these properties has a custom setter below.
@synthesize texelSpacingMultiplier = _texelSpacingMultiplier;
@synthesize blurRadiusInPixels = _blurRadiusInPixels;
@synthesize blurRadiusAsFractionOfImageWidth  = _blurRadiusAsFractionOfImageWidth;
@synthesize blurRadiusAsFractionOfImageHeight = _blurRadiusAsFractionOfImageHeight;
@synthesize blurPasses = _blurPasses;
#pragma mark -
#pragma mark Initialization and teardown
// Designated-style initializer: hands the four shader sources (horizontal and
// vertical passes) to the two-pass superclass, then applies defaults of
// texel spacing 1.0 and a 2-pixel blur radius.
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString
{
    self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    self.texelSpacingMultiplier = 1.0;
    _blurRadiusInPixels = 2.0;
    shouldResizeBlurRadiusWithImageSize = NO;

    return self;
}
// Default blur: sigma 2.0 over a 4-pixel radius, using the same optimized
// shader pair for both the horizontal and vertical passes.
- (id)init;
{
    NSString *defaultVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:2.0];
    NSString *defaultFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:2.0];

    return [self initWithFirstStageVertexShaderFromString:defaultVertexShader firstStageFragmentShaderFromString:defaultFragmentShader secondStageVertexShaderFromString:defaultVertexShader secondStageFragmentShaderFromString:defaultFragmentShader];
}
#pragma mark -
#pragma mark Auto-generation of optimized Gaussian shaders
// "Implementation limit of 32 varying components exceeded" - Max number of varyings for these GPUs
// Generates a vertex shader that emits (2 * blurRadius + 1) varying texture
// coordinates, one per tap of a standard (one-read-per-tap) Gaussian blur.
// sigma is unused here; the weights are applied in the matching fragment
// shader. Radius 0 returns the stock passthrough vertex shader.
+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
    if (blurRadius < 1)
    {
        return kGPUImageVertexShaderString;
    }

//    NSLog(@"Max varyings: %d", [GPUImageContext maximumVaryingVectorsForThisDevice]);
    NSMutableString *shaderString = [[NSMutableString alloc] init];

    // Header: declare one varying per tap.
    [shaderString appendFormat:@"\
attribute vec4 position;\n\
attribute vec4 inputTextureCoordinate;\n\
\n\
uniform float texelWidthOffset;\n\
uniform float texelHeightOffset;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
gl_Position = position;\n\
\n\
vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(blurRadius * 2 + 1) ];

    // Inner offset loop: one coordinate per tap, offset from the center texel.
    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)
    {
        // Cast before subtracting: both operands are unsigned, and taps to the
        // left of center require a genuinely negative offset. (The original
        // unsigned subtraction wrapped, then relied on implementation-defined
        // conversion to NSInteger.)
        NSInteger offsetFromCenter = (NSInteger)currentBlurCoordinateIndex - (NSInteger)blurRadius;
        if (offsetFromCenter < 0)
        {
            [shaderString appendFormat:@"blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(-offsetFromCenter)];
        }
        else if (offsetFromCenter > 0)
        {
            [shaderString appendFormat:@"blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(offsetFromCenter)];
        }
        else
        {
            [shaderString appendFormat:@"blurCoordinates[%lu] = inputTextureCoordinate.xy;\n", (unsigned long)currentBlurCoordinateIndex];
        }
    }

    // Footer
    [shaderString appendString:@"}\n"];

    return shaderString;
}
// Generates the fragment shader for a standard (one texture read per tap)
// Gaussian blur of the given radius/sigma; pairs with
// +vertexShaderForStandardBlurOfRadius:sigma:, which supplies the
// (2 * blurRadius + 1) varying coordinates consumed here. Radius 0 returns
// the stock passthrough fragment shader.
+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
    if (blurRadius < 1)
    {
        return kGPUImagePassthroughFragmentShaderString;
    }

    // First, generate the normal Gaussian weights for a given sigma
    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
    GLfloat sumOfWeights = 0.0;
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));

        if (currentGaussianWeightIndex == 0)
        {
            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
        }
        else
        {
            // Off-center weights appear on both sides of the kernel.
            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
        }
    }

    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
    }

    // Finally, generate the shader from these weights
    NSMutableString *shaderString = [[NSMutableString alloc] init];

    // Header
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    [shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
\n\
varying highp vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
lowp vec4 sum = vec4(0.0);\n", (unsigned long)(blurRadius * 2 + 1) ];
#else
    // Cast added: NSUInteger is not unsigned long on all architectures, and
    // %lu requires it.
    [shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
vec4 sum = vec4(0.0);\n", (unsigned long)(blurRadius * 2 + 1) ];
#endif

    // Inner texture loop
    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)
    {
        // Cast before subtracting: both operands are unsigned, and the result
        // is negated below to index the mirrored weight array.
        NSInteger offsetFromCenter = (NSInteger)currentBlurCoordinateIndex - (NSInteger)blurRadius;
        if (offsetFromCenter < 0)
        {
            [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[-offsetFromCenter]];
        }
        else
        {
            [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[offsetFromCenter]];
        }
    }

    // Footer
    [shaderString appendString:@"\
gl_FragColor = sum;\n\
}\n"];

    free(standardGaussianWeights);
    return shaderString;
}
// Generates the optimized blur's vertex shader. Adjacent tap pairs are
// merged into single reads placed at a weighted offset between the two
// texels (linear-sampling optimization), capped at 7 offset pairs to stay
// within varying limits; the center tap is blurCoordinates[0].
+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
if (blurRadius < 1)
{
return kGPUImageVertexShaderString;
}

// First, generate the normal Gaussian weights for a given sigma
GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
GLfloat sumOfWeights = 0.0;
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));

if (currentGaussianWeightIndex == 0)
{
sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
}
else
{
// Off-center weights appear on both sides of the kernel.
sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
}
}

// Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
}

// From these weights we calculate the offsets to read interpolated values from
NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));

for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
{
GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1];
GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2];

GLfloat optimizedWeight = firstWeight + secondWeight;

// Weighted mean of the two texel positions, so a single linearly
// filtered read reproduces both weighted taps.
optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight;
}

NSMutableString *shaderString = [[NSMutableString alloc] init];

// Header
[shaderString appendFormat:@"\
attribute vec4 position;\n\
attribute vec4 inputTextureCoordinate;\n\
\n\
uniform float texelWidthOffset;\n\
uniform float texelHeightOffset;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
gl_Position = position;\n\
\n\
vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];

// Inner offset loop
[shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"];
for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
{
[shaderString appendFormat:@"\
blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n\
blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]];
}

// Footer
[shaderString appendString:@"}\n"];

free(optimizedGaussianOffsets);
free(standardGaussianWeights);
return shaderString;
}
// Generates the optimized blur's fragment shader. Up to 7 merged offset
// pairs arrive via varyings; any taps beyond that are computed as dependent
// texture reads inside the shader using singleStepOffset.
+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
if (blurRadius < 1)
{
return kGPUImagePassthroughFragmentShaderString;
}

// First, generate the normal Gaussian weights for a given sigma
GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
GLfloat sumOfWeights = 0.0;
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));

if (currentGaussianWeightIndex == 0)
{
sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
}
else
{
// Off-center weights appear on both sides of the kernel.
sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
}
}

// Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
}

// From these weights we calculate the offsets to read interpolated values from
NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);

NSMutableString *shaderString = [[NSMutableString alloc] init];

// Header
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
uniform highp float texelWidthOffset;\n\
uniform highp float texelHeightOffset;\n\
\n\
varying highp vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
lowp vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
#else
[shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
uniform float texelWidthOffset;\n\
uniform float texelHeightOffset;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
vec4 sum = vec4(0.0);\n", 1 + (numberOfOptimizedOffsets * 2) ];
#endif

// Inner texture loop: center tap, then each merged pair at its
// precomputed (interpolated) varying coordinate.
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\n", standardGaussianWeights[0]];

for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)
{
GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1];
GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2];
GLfloat optimizedWeight = firstWeight + secondWeight;

[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight];
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight];
}

// If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader
if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
#else
[shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
#endif

for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)
{
GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1];
GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2];

GLfloat optimizedWeight = firstWeight + secondWeight;
GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight;

[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight];
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight];
}
}

// Footer
[shaderString appendString:@"\
gl_FragColor = sum;\n\
}\n"];

free(standardGaussianWeights);
return shaderString;
}
// Recomputes the pixel blur radius from the frame size when a fractional
// radius has been requested.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    [super setupFilterForSize:filterFrameSize];

    if (!shouldResizeBlurRadiusWithImageSize)
    {
        return;
    }

    // Prefer the width-relative radius when one is set; otherwise fall back
    // to the height-relative one.
    CGFloat fractionOfWidth = self.blurRadiusAsFractionOfImageWidth;
    if (fractionOfWidth > 0)
    {
        self.blurRadiusInPixels = filterFrameSize.width * fractionOfWidth;
    }
    else
    {
        self.blurRadiusInPixels = filterFrameSize.height * self.blurRadiusAsFractionOfImageHeight;
    }
}
#pragma mark -
#pragma mark Rendering
// Runs the two-pass blur once, then repeats it (blurPasses - 1) more times.
// Additional passes use unrotated texture coordinates, since the first pass
// already normalized orientation.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
[super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];

for (NSUInteger currentAdditionalBlurPass = 1; currentAdditionalBlurPass < _blurPasses; currentAdditionalBlurPass++)
{
[super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];
}
}
// Replaces both pass programs with newly compiled ones built from the given
// shader pair, rebinding all attribute/uniform locations. Runs synchronously
// on the video processing queue; asserts on link failure.
- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];

// First pass program.
filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];

if (!filterProgram.initialized)
{
[self initializeAttributes];

if (![filterProgram link])
{
NSString *progLog = [filterProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [filterProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [filterProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
filterProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}

filterPositionAttribute = [filterProgram attributeIndex:@"position"];
filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"];
verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"];

[GPUImageContext setActiveShaderProgram:filterProgram];

glEnableVertexAttribArray(filterPositionAttribute);
glEnableVertexAttribArray(filterTextureCoordinateAttribute);

// Second pass program (same sources; separate program object).
secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];

if (!secondFilterProgram.initialized)
{
[self initializeSecondaryAttributes];

if (![secondFilterProgram link])
{
NSString *progLog = [secondFilterProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [secondFilterProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [secondFilterProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
secondFilterProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}

secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"];
horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"];

[GPUImageContext setActiveShaderProgram:secondFilterProgram];

glEnableVertexAttribArray(secondFilterPositionAttribute);
glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);

// Re-derive texel offsets for the new programs.
[self setupFilterForSize:[self sizeOfFBO]];
glFinish();
});
}
#pragma mark -
#pragma mark Accessors
/// Applies one multiplier to both the horizontal and vertical texel spacing,
/// then rebuilds the blur pass geometry for the current FBO size.
- (void)setTexelSpacingMultiplier:(CGFloat)newValue;
{
    _texelSpacingMultiplier = newValue;
    _horizontalTexelSpacing = newValue;
    _verticalTexelSpacing = newValue;
    [self setupFilterForSize:[self sizeOfFBO]];
}
// inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, so I'm using that for my blur radius, to be consistent
// Setter that regenerates the optimized blur shader pair whenever the (rounded) sigma changes.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
    // 7.0 is the limit for blur size for hardcoded varying offsets
    if (round(newValue) != _blurRadiusInPixels)
    {
        _blurRadiusInPixels = round(newValue); // For now, only do integral sigmas
        NSUInteger calculatedSampleRadius = 0;
        if (_blurRadiusInPixels >= 1) // Avoid a divide-by-zero error here
        {
            // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel
            CGFloat minimumWeightToFindEdgeOfSamplingArea = 1.0/256.0;
            // Inverts the Gaussian weight function to find the distance at which a sample's
            // contribution drops below the minimum weight above.
            calculatedSampleRadius = floor(sqrt(-2.0 * pow(_blurRadiusInPixels, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * M_PI * pow(_blurRadiusInPixels, 2.0))) ));
            calculatedSampleRadius += calculatedSampleRadius % 2; // There's nothing to gain from handling odd radius sizes, due to the optimizations I use
        }
        //      NSLog(@"Blur radius: %f, calculated sample radius: %d", _blurRadiusInPixels, calculatedSampleRadius);
        //
        // Rebuild both stages of the separable blur for the new sample radius and sigma.
        NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];
        NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];
        //      NSLog(@"Optimized vertex shader: \n%@", newGaussianBlurVertexShader);
        //      NSLog(@"Optimized fragment shader: \n%@", newGaussianBlurFragmentShader);
        //
        [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader];
    }
    shouldResizeBlurRadiusWithImageSize = NO; // An explicit pixel radius overrides fractional-of-image sizing
}
/// Sizes the blur radius relative to the image width. Negative values are
/// ignored; setting a width-based fraction clears any height-based fraction.
- (void)setBlurRadiusAsFractionOfImageWidth:(CGFloat)blurRadiusAsFractionOfImageWidth
{
    if (blurRadiusAsFractionOfImageWidth < 0)
    {
        return;
    }

    BOOL fractionChanged = (_blurRadiusAsFractionOfImageWidth != blurRadiusAsFractionOfImageWidth);
    shouldResizeBlurRadiusWithImageSize = fractionChanged && (blurRadiusAsFractionOfImageWidth > 0);
    _blurRadiusAsFractionOfImageWidth = blurRadiusAsFractionOfImageWidth;
    _blurRadiusAsFractionOfImageHeight = 0;
}
/// Sizes the blur radius relative to the image height. Negative values are
/// ignored; setting a height-based fraction clears any width-based fraction.
- (void)setBlurRadiusAsFractionOfImageHeight:(CGFloat)blurRadiusAsFractionOfImageHeight
{
    if (blurRadiusAsFractionOfImageHeight < 0)
    {
        return;
    }

    BOOL fractionChanged = (_blurRadiusAsFractionOfImageHeight != blurRadiusAsFractionOfImageHeight);
    shouldResizeBlurRadiusWithImageSize = fractionChanged && (blurRadiusAsFractionOfImageHeight > 0);
    _blurRadiusAsFractionOfImageHeight = blurRadiusAsFractionOfImageHeight;
    _blurRadiusAsFractionOfImageWidth = 0;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 5,330 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImagePicture;
/** A photo filter based on Photoshop action by Miss Etikate:
(Link to the original Photoshop action has been omitted from this copy of the source.)
*/
// Note: If you want to use this effect you have to add lookup_miss_etikate.png
// from Resources folder to your application bundle.
// Filter group applying the Miss Etikate color lookup table to the incoming image.
@interface GPUImageMissEtikateFilter : GPUImageFilterGroup
{
    GPUImagePicture *lookupImageSource; // Source wrapping the lookup_miss_etikate.png lookup table
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 92 |
```objective-c
#import "GPUImageHarrisCornerDetectionFilter.h"
/** Noble corner detector
This is the Noble variant on the Harris detector, from
Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.
*/
// Subclasses the Harris detector, substituting the Noble cornerness calculation (see the .m shader).
@interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 77 |
```objective-c
#import "GPUImageFilter.h"
// Scales the red, green, and blue channels of an image independently.
@interface GPUImageRGBFilter : GPUImageFilter
{
    GLint redUniform;   // shader uniform location for the red multiplier
    GLint greenUniform; // shader uniform location for the green multiplier
    GLint blueUniform;  // shader uniform location for the blue multiplier
}
// Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
@property (readwrite, nonatomic) CGFloat red;
@property (readwrite, nonatomic) CGFloat green;
@property (readwrite, nonatomic) CGFloat blue;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 100 |
```objective-c
#import "GPUImagePixellateFilter.h"
// Renders the image as a halftone dot pattern; inherits cell sizing/center controls from the pixellate filter.
@interface GPUImageHalftoneFilter : GPUImagePixellateFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 28 |
```objective-c
#import "GPUImageSaturationBlendFilter.h"
/**
* Saturation blend mode based upon pseudo code from the PDF specification.
*/
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Saturation blend (PDF-spec non-separable blend): keeps the base image's luminosity and hue
// while taking saturation from the overlay.
// Fix: in the "b is max, r is mid, g is min" branch of setsat(), the original assigned
// c.r = 0.0 — clobbering the mid value just written to c.r — instead of zeroing the
// minimum channel c.g as the SetSat pseudocode requires.
NSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 highp float lum(lowp vec3 c) {
     return dot(c, vec3(0.3, 0.59, 0.11));
 }

 lowp vec3 clipcolor(lowp vec3 c) {
     highp float l = lum(c);
     lowp float n = min(min(c.r, c.g), c.b);
     lowp float x = max(max(c.r, c.g), c.b);

     if (n < 0.0) {
         c.r = l + ((c.r - l) * l) / (l - n);
         c.g = l + ((c.g - l) * l) / (l - n);
         c.b = l + ((c.b - l) * l) / (l - n);
     }
     if (x > 1.0) {
         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
     }

     return c;
 }

 lowp vec3 setlum(lowp vec3 c, highp float l) {
     highp float d = l - lum(c);
     c = c + vec3(d);
     return clipcolor(c);
 }

 highp float sat(lowp vec3 c) {
     lowp float n = min(min(c.r, c.g), c.b);
     lowp float x = max(max(c.r, c.g), c.b);
     return x - n;
 }

 lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) {
     return ((cmid - cmin) * s) / (cmax - cmin);
 }

 lowp vec3 setsat(lowp vec3 c, highp float s) {
     if (c.r > c.g) {
         if (c.r > c.b) {
             if (c.g > c.b) {
                 /* g is mid, b is min */
                 c.g = mid(c.b, c.g, c.r, s);
                 c.b = 0.0;
             } else {
                 /* b is mid, g is min */
                 c.b = mid(c.g, c.b, c.r, s);
                 c.g = 0.0;
             }
             c.r = s;
         } else {
             /* b is max, r is mid, g is min */
             c.r = mid(c.g, c.r, c.b, s);
             c.b = s;
             c.g = 0.0;
         }
     } else if (c.r > c.b) {
         /* g is max, r is mid, b is min */
         c.r = mid(c.b, c.r, c.g, s);
         c.g = s;
         c.b = 0.0;
     } else if (c.g > c.b) {
         /* g is max, b is mid, r is min */
         c.b = mid(c.r, c.b, c.g, s);
         c.g = s;
         c.r = 0.0;
     } else if (c.b > c.g) {
         /* b is max, g is mid, r is min */
         c.g = mid(c.r, c.g, c.b, s);
         c.b = s;
         c.r = 0.0;
     } else {
         c = vec3(0.0);
     }
     return c;
 }

 void main()
 {
     highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
     highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
 }
);
#else
// Desktop-GL variant of the saturation blend shader (no precision qualifiers).
// Fix: in the "b is max, r is mid, g is min" branch of setsat(), the original assigned
// c.r = 0.0 — clobbering the mid value just written to c.r — instead of zeroing the
// minimum channel c.g as the SetSat pseudocode requires.
NSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 float lum(vec3 c) {
     return dot(c, vec3(0.3, 0.59, 0.11));
 }

 vec3 clipcolor(vec3 c) {
     float l = lum(c);
     float n = min(min(c.r, c.g), c.b);
     float x = max(max(c.r, c.g), c.b);

     if (n < 0.0) {
         c.r = l + ((c.r - l) * l) / (l - n);
         c.g = l + ((c.g - l) * l) / (l - n);
         c.b = l + ((c.b - l) * l) / (l - n);
     }
     if (x > 1.0) {
         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
     }

     return c;
 }

 vec3 setlum(vec3 c, float l) {
     float d = l - lum(c);
     c = c + vec3(d);
     return clipcolor(c);
 }

 float sat(vec3 c) {
     float n = min(min(c.r, c.g), c.b);
     float x = max(max(c.r, c.g), c.b);
     return x - n;
 }

 float mid(float cmin, float cmid, float cmax, float s) {
     return ((cmid - cmin) * s) / (cmax - cmin);
 }

 vec3 setsat(vec3 c, float s) {
     if (c.r > c.g) {
         if (c.r > c.b) {
             if (c.g > c.b) {
                 /* g is mid, b is min */
                 c.g = mid(c.b, c.g, c.r, s);
                 c.b = 0.0;
             } else {
                 /* b is mid, g is min */
                 c.b = mid(c.g, c.b, c.r, s);
                 c.g = 0.0;
             }
             c.r = s;
         } else {
             /* b is max, r is mid, g is min */
             c.r = mid(c.g, c.r, c.b, s);
             c.b = s;
             c.g = 0.0;
         }
     } else if (c.r > c.b) {
         /* g is max, r is mid, b is min */
         c.r = mid(c.b, c.r, c.g, s);
         c.g = s;
         c.b = 0.0;
     } else if (c.g > c.b) {
         /* g is max, b is mid, r is min */
         c.b = mid(c.r, c.b, c.g, s);
         c.g = s;
         c.r = 0.0;
     } else if (c.b > c.g) {
         /* b is max, g is mid, r is min */
         c.g = mid(c.r, c.g, c.b, s);
         c.b = s;
         c.r = 0.0;
     } else {
         c = vec3(0.0);
     }
     return c;
 }

 void main()
 {
     vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
 }
);
#endif
@implementation GPUImageSaturationBlendFilter
/// Initializes the two-input blend with the saturation fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageSaturationBlendFragmentShaderString];
    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,918 |
```objective-c
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
/** Protocol for receiving a callback when movie playback finishes.
 */
@protocol GPUImageMovieDelegate <NSObject>

// Called once the movie has finished playing.
- (void)didCompletePlayingMovie;
@end

/** Source object for filtering movies
 */
@interface GPUImageMovie : GPUImageOutput

@property (readwrite, retain) AVAsset *asset;
@property (readwrite, retain) AVPlayerItem *playerItem;
@property(readwrite, retain) NSURL *url;

/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
 */
@property(readwrite, nonatomic) BOOL runBenchmark;

/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
 */
@property(readwrite, nonatomic) BOOL playAtActualSpeed;

/** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.
 */
@property(readwrite, nonatomic) BOOL shouldRepeat;

/** This specifies the progress of the process on a scale from 0 to 1.0. A value of 0 means the process has not yet begun, A value of 1.0 means the conversion is complete.
 This property is not key-value observable.
 */
@property(readonly, nonatomic) float progress;

/** Delegate notified (via didCompletePlayingMovie) when the movie finishes playing.
 */
@property (readwrite, nonatomic, assign) id <GPUImageMovieDelegate>delegate;

@property (readonly, nonatomic) AVAssetReader *assetReader;
@property (readonly, nonatomic) BOOL audioEncodingIsFinished;
@property (readonly, nonatomic) BOOL videoEncodingIsFinished;

/// @name Initialization and teardown
- (id)initWithAsset:(AVAsset *)asset;
- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
- (id)initWithURL:(NSURL *)url;
- (void)yuvConversionSetup;

/// @name Movie processing
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
- (void)startProcessing;
- (void)endProcessing;
- (void)cancelProcessing;
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMovie.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 518 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input soft-light blend; the blend equation lives in the corresponding implementation file.
@interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 28 |
```objective-c
#import "GPUImageNobleCornerDetectionFilter.h"
@implementation GPUImageNobleCornerDetectionFilter
// Noble cornerness measure, computed from the packed derivative texture:
// x = Ix^2, y = Iy^2, z = Ixy remapped into [0,1] (hence the *2.0 - 1.0 unpack below).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float sensitivity;
void main()
{
mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
mediump float derivativeSum = derivativeElements.x + derivativeElements.y;
// R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2)
mediump float zElement = (derivativeElements.z * 2.0) - 1.0;
// mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum);
mediump float cornerness = (derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum);
// Original Harris detector
// R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
// highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum;
// gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0);
gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
}
);
#else
// Desktop-GL variant: identical math, no precision qualifiers.
NSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float sensitivity;
void main()
{
vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
float derivativeSum = derivativeElements.x + derivativeElements.y;
// R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2)
float zElement = (derivativeElements.z * 2.0) - 1.0;
// mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum);
float cornerness = (derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum);
// Original Harris detector
// R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
// highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum;
// gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0);
gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
/// Builds the Harris corner-detection pipeline using the Noble cornerness shader.
- (id)init;
{
    self = [self initWithCornerDetectionFragmentShader:kGPUImageNobleCornerDetectionFragmentShaderString];
    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 750 |
```objective-c
#import "GPUImagePixellatePositionFilter.h"
// Pixellates only within a circular region: aspect-ratio-corrected distance from
// pixelateCenter selects between the pixellated sample and the untouched texel.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float fractionalWidthOfPixel;
uniform highp float aspectRatio;
uniform lowp vec2 pixelateCenter;
uniform highp float pixelateRadius;
void main()
{
highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp float dist = distance(pixelateCenter, textureCoordinateToUse);
if (dist < pixelateRadius)
{
highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
gl_FragColor = texture2D(inputImageTexture, samplePos );
}
else
{
gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
}
}
);
#else
// Desktop-GL variant: identical math, no precision qualifiers.
NSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float fractionalWidthOfPixel;
uniform float aspectRatio;
uniform vec2 pixelateCenter;
uniform float pixelateRadius;
void main()
{
vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
float dist = distance(pixelateCenter, textureCoordinateToUse);
if (dist < pixelateRadius)
{
vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
gl_FragColor = texture2D(inputImageTexture, samplePos );
}
else
{
gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
}
}
);
#endif
// Private interface: aspectRatio is derived from the input size/rotation rather than set by clients.
@interface GPUImagePixellatePositionFilter ()
- (void)adjustAspectRatio;
@property (readwrite, nonatomic) CGFloat aspectRatio;
@end

@implementation GPUImagePixellatePositionFilter
@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel;
@synthesize aspectRatio = _aspectRatio;
@synthesize center = _center;
@synthesize radius = _radius;
#pragma mark -
#pragma mark Initialization and teardown
/// Initializes the filter with the position-aware pixellation shader.
- (id)init;
{
    self = [self initWithFragmentShaderFromString:kGPUImagePixellationPositionFragmentShaderString];
    return self;
}
// Compiles the shader, caches all uniform locations, then applies the defaults.
// NOTE: uniform lookups must precede the property assignments below, since each
// setter writes through its cached uniform location.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }
    fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@"fractionalWidthOfPixel"];
    aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
    centerUniform = [filterProgram uniformIndex:@"pixelateCenter"];
    radiusUniform = [filterProgram uniformIndex:@"pixelateRadius"];
    // Defaults: 5% cells, centered circle covering a quarter of the normalized height.
    self.fractionalWidthOfAPixel = 0.05;
    self.center = CGPointMake(0.5f, 0.5f);
    self.radius = 0.25f;
    return self;
}
/// Tracks input size changes so the aspect-ratio uniform stays in sync with the texture.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    CGSize previousSize = inputTextureSize;
    [super setInputSize:newSize atIndex:textureIndex];

    BOOL sizeChanged = !CGSizeEqualToSize(previousSize, inputTextureSize);
    BOOL sizeIsNonZero = !CGSizeEqualToSize(newSize, CGSizeZero);
    if (sizeChanged && sizeIsNonZero)
    {
        [self adjustAspectRatio];
    }
}
#pragma mark -
#pragma mark Accessors
/// Re-derives the rotated center point and aspect ratio whenever the input rotation changes.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self setCenter:[self center]];
    [self adjustAspectRatio];
}
/// Pushes height/width (or width/height when the rotation swaps axes) to the shader.
- (void)adjustAspectRatio;
{
    CGFloat ratio;
    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
    {
        ratio = inputTextureSize.width / inputTextureSize.height;
    }
    else
    {
        ratio = inputTextureSize.height / inputTextureSize.width;
    }
    [self setAspectRatio:ratio];
}
// Forwards the forced size to the superclass, then refreshes the aspect ratio to match it.
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    [super forceProcessingAtSize:frameSize];
    [self adjustAspectRatio];
}
/// Clamps the requested cell width so each pixellated cell spans at least one texel,
/// then uploads the result to the shader uniform.
- (void)setFractionalWidthOfAPixel:(CGFloat)newValue;
{
    // When the input size isn't known yet, assume a 2048-wide texture for the lower bound.
    CGFloat minimumFraction;
    if (inputTextureSize.width != 0.0)
    {
        minimumFraction = 1.0 / inputTextureSize.width;
    }
    else
    {
        minimumFraction = 1.0 / 2048.0;
    }

    _fractionalWidthOfAPixel = (newValue < minimumFraction) ? minimumFraction : newValue;
    [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram];
}
/// Stores the aspect ratio and mirrors it into the shader uniform.
- (void)setAspectRatio:(CGFloat)newValue;
{
    _aspectRatio = newValue;
    [self setFloat:newValue forUniform:aspectRatioUniform program:filterProgram];
}
/// Stores the user-space center and uploads its rotation-adjusted equivalent to the shader.
- (void)setCenter:(CGPoint)center
{
    _center = center;
    [self setPoint:[self rotatedPoint:center forRotation:inputRotation] forUniform:centerUniform program:filterProgram];
}
/// Stores the pixellation radius and mirrors it into the shader uniform.
- (void)setRadius:(CGFloat)radius
{
    _radius = radius;
    [self setFloat:radius forUniform:radiusUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,271 |
```objective-c
#import "GPUImageFilterGroup.h"
#import "GPUImageLowPassFilter.h"
#import "GPUImageDifferenceBlendFilter.h"
// High pass: subtracts a low-pass (temporally averaged) version of the video from the current frame.
@interface GPUImageHighPassFilter : GPUImageFilterGroup
{
    GPUImageLowPassFilter *lowPassFilter;           // produces the accumulated/averaged frame
    GPUImageDifferenceBlendFilter *differenceBlendFilter; // subtracts it from the live frame
}

// This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
@property(readwrite, nonatomic) CGFloat filterStrength;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 119 |
```objective-c
#import "GPUImage3x3ConvolutionFilter.h"
// 3x3 convolution: samples the center texel and its eight neighbors (coordinates
// precomputed in the vertex shader) and weights them by convolutionMatrix.
// Alpha is passed through from the center texel.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING
(
precision highp float;
uniform sampler2D inputImageTexture;
uniform mediump mat3 convolutionMatrix;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
void main()
{
mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
gl_FragColor = vec4(resultColor, centerColor.a);
}
);
#else
// Desktop-GL variant: identical math, no precision qualifiers.
NSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
uniform mat3 convolutionMatrix;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
void main()
{
vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
gl_FragColor = vec4(resultColor, centerColor.a);
}
);
#endif
@implementation GPUImage3x3ConvolutionFilter
@synthesize convolutionKernel = _convolutionKernel;
#pragma mark -
#pragma mark Initialization and teardown
/// Creates the convolution filter with an identity kernel, so output initially equals input.
- (id)init;
{
    self = [self initWithFragmentShaderFromString:kGPUImage3x3ConvolutionFragmentShaderString];
    if (self)
    {
        GPUMatrix3x3 identityKernel = {
            {0.f, 0.f, 0.f},
            {0.f, 1.f, 0.f},
            {0.f, 0.f, 0.f}
        };
        self.convolutionKernel = identityKernel;
    }
    return self;
}
/// Compiles the given fragment shader and caches the mat3 kernel uniform location.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    self = [super initWithFragmentShaderFromString:fragmentShaderString];
    if (self)
    {
        convolutionMatrixUniform = [filterProgram uniformIndex:@"convolutionMatrix"];
    }
    return self;
}
#pragma mark -
#pragma mark Accessors
/// Stores the kernel and uploads it to the shader's mat3 uniform.
- (void)setConvolutionKernel:(GPUMatrix3x3)newValue;
{
    _convolutionKernel = newValue;
    [self setMatrix3f:newValue forUniform:convolutionMatrixUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,133 |
```objective-c
#import "GPUImageBilateralFilter.h"
// Vertex shader shared by both bilateral passes: precomputes the 9 sample
// coordinates along one axis (the texel offsets select horizontal vs. vertical).
NSString *const kGPUImageBilateralBlurVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
const int GAUSSIAN_SAMPLES = 9;
uniform float texelWidthOffset;
uniform float texelHeightOffset;
varying vec2 textureCoordinate;
varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
void main()
{
gl_Position = position;
textureCoordinate = inputTextureCoordinate.xy;
// Calculate the positions for the blur
int multiplier = 0;
vec2 blurStep;
vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);
for (int i = 0; i < GAUSSIAN_SAMPLES; i++)
{
multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));
// Blur in x (horizontal)
blurStep = float(multiplier) * singleStepOffset;
blurCoordinates[i] = inputTextureCoordinate.xy + blurStep;
}
}
);
// Bilateral fragment shader: a 9-tap Gaussian (weights 0.05/0.09/0.12/0.15 around a
// 0.18 center) where each tap's weight is additionally attenuated by its RGBA color
// distance from the center sample, preserving edges while smoothing flat regions.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
const lowp int GAUSSIAN_SAMPLES = 9;
varying highp vec2 textureCoordinate;
varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];
uniform mediump float distanceNormalizationFactor;
void main()
{
lowp vec4 centralColor;
lowp float gaussianWeightTotal;
lowp vec4 sum;
lowp vec4 sampleColor;
lowp float distanceFromCentralColor;
lowp float gaussianWeight;
centralColor = texture2D(inputImageTexture, blurCoordinates[4]);
gaussianWeightTotal = 0.18;
sum = centralColor * 0.18;
sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
gl_FragColor = sum / gaussianWeightTotal;
}
);
#else
// Desktop-GL variant: identical math, no precision qualifiers.
NSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
const int GAUSSIAN_SAMPLES = 9;
varying vec2 textureCoordinate;
varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
uniform float distanceNormalizationFactor;
void main()
{
vec4 centralColor;
float gaussianWeightTotal;
vec4 sum;
vec4 sampleColor;
float distanceFromCentralColor;
float gaussianWeight;
centralColor = texture2D(inputImageTexture, blurCoordinates[4]);
gaussianWeightTotal = 0.18;
sum = centralColor * 0.18;
sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);
distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
gl_FragColor = sum / gaussianWeightTotal;
}
);
#endif
@implementation GPUImageBilateralFilter
@synthesize distanceNormalizationFactor = _distanceNormalizationFactor;
- (id)init;
{
if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString
firstStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString
secondStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString
secondStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString])) {
return nil;
}
firstDistanceNormalizationFactorUniform = [filterProgram uniformIndex:@"distanceNormalizationFactor"];
secondDistanceNormalizationFactorUniform = [filterProgram uniformIndex:@"distanceNormalizationFactor"];
self.texelSpacingMultiplier = 4.0;
self.distanceNormalizationFactor = 8.0;
return self;
}
#pragma mark -
#pragma mark Accessors
- (void)setDistanceNormalizationFactor:(CGFloat)newValue
{
_distanceNormalizationFactor = newValue;
[self setFloat:newValue
forUniform:firstDistanceNormalizationFactorUniform
program:filterProgram];
[self setFloat:newValue
forUniform:secondDistanceNormalizationFactorUniform
program:secondFilterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,070 |
```objective-c
#import "GPUImagePolarPixellateFilter.h"
// @fattjake based on vid by toneburst
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp vec2 center;
uniform highp vec2 pixelSize;
void main()
{
highp vec2 normCoord = 2.0 * textureCoordinate - 1.0;
highp vec2 normCenter = 2.0 * center - 1.0;
normCoord -= normCenter;
highp float r = length(normCoord); // to polar coords
highp float phi = atan(normCoord.y, normCoord.x); // to polar coords
r = r - mod(r, pixelSize.x) + 0.03;
phi = phi - mod(phi, pixelSize.y);
normCoord.x = r * cos(phi);
normCoord.y = r * sin(phi);
normCoord += normCenter;
mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
}
);
#else
NSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec2 center;
uniform vec2 pixelSize;
void main()
{
vec2 normCoord = 2.0 * textureCoordinate - 1.0;
vec2 normCenter = 2.0 * center - 1.0;
normCoord -= normCenter;
float r = length(normCoord); // to polar coords
float phi = atan(normCoord.y, normCoord.x); // to polar coords
r = r - mod(r, pixelSize.x) + 0.03;
phi = phi - mod(phi, pixelSize.y);
normCoord.x = r * cos(phi);
normCoord.y = r * sin(phi);
normCoord += normCenter;
vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
}
);
#endif
@implementation GPUImagePolarPixellateFilter
@synthesize center = _center;
@synthesize pixelSize = _pixelSize;
#pragma mark -
#pragma mark Initialization and teardown
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImagePolarPixellateFragmentShaderString]))
{
return nil;
}
pixelSizeUniform = [filterProgram uniformIndex:@"pixelSize"];
centerUniform = [filterProgram uniformIndex:@"center"];
self.pixelSize = CGSizeMake(0.05, 0.05);
self.center = CGPointMake(0.5, 0.5);
return self;
}
#pragma mark -
#pragma mark Accessors
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
[super setInputRotation:newInputRotation atIndex:textureIndex];
[self setCenter:self.center];
}
- (void)setPixelSize:(CGSize)pixelSize
{
_pixelSize = pixelSize;
[self setSize:_pixelSize forUniform:pixelSizeUniform program:filterProgram];
}
- (void)setCenter:(CGPoint)newValue;
{
_center = newValue;
CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
[self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 807 |
```objective-c
#import <UIKit/UIKit.h>
#import "GPUImageContext.h"
typedef enum {
kGPUImageFillModeStretch, // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio
kGPUImageFillModePreserveAspectRatio, // Maintains the aspect ratio of the source image, adding bars of the specified background color
kGPUImageFillModePreserveAspectRatioAndFill // Maintains the aspect ratio of the source image, zooming in on its center to fill the view
} GPUImageFillModeType;
/**
UIView subclass to use as an endpoint for displaying GPUImage outputs
*/
@interface GPUImageView : UIView <GPUImageInput>
{
GPUImageRotationMode inputRotation;
}
/** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio
*/
@property(readwrite, nonatomic) GPUImageFillModeType fillMode;
/** This calculates the current display size, in pixels, taking into account Retina scaling factors
*/
@property(readonly, nonatomic) CGSize sizeInPixels;
@property(nonatomic) BOOL enabled;
/** Handling fill mode
@param redComponent Red component for background color
@param greenComponent Green component for background color
@param blueComponent Blue component for background color
@param alphaComponent Alpha component for background color
*/
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImageView.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 323 |
```objective-c
#import "GPUImageView.h"
#import <OpenGLES/EAGLDrawable.h>
#import <QuartzCore/QuartzCore.h>
#import "GPUImageContext.h"
#import "GPUImageFilter.h"
#import <AVFoundation/AVFoundation.h>
#pragma mark -
#pragma mark Private methods and instance variables
@interface GPUImageView ()
{
GPUImageFramebuffer *inputFramebufferForDisplay;
GLuint displayRenderbuffer, displayFramebuffer;
GLProgram *displayProgram;
GLint displayPositionAttribute, displayTextureCoordinateAttribute;
GLint displayInputTextureUniform;
CGSize inputImageSize;
GLfloat imageVertices[8];
GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;
CGSize boundsSizeAtFrameBufferEpoch;
}
@property (assign, nonatomic) NSUInteger aspectRatio;
// Initialization and teardown
- (void)commonInit;
// Managing the display FBOs
- (void)createDisplayFramebuffer;
- (void)destroyDisplayFramebuffer;
// Handling fill mode
- (void)recalculateViewGeometry;
@end
@implementation GPUImageView
@synthesize aspectRatio;
@synthesize sizeInPixels = _sizeInPixels;
@synthesize fillMode = _fillMode;
@synthesize enabled;
#pragma mark -
#pragma mark Initialization and teardown
+ (Class)layerClass
{
return [CAEAGLLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
if (!(self = [super initWithFrame:frame]))
{
return nil;
}
[self commonInit];
return self;
}
-(id)initWithCoder:(NSCoder *)coder
{
if (!(self = [super initWithCoder:coder]))
{
return nil;
}
[self commonInit];
return self;
}
- (void)commonInit;
{
// Set scaling to account for Retina display
if ([self respondsToSelector:@selector(setContentScaleFactor:)])
{
self.contentScaleFactor = [[UIScreen mainScreen] scale];
}
inputRotation = kGPUImageNoRotation;
self.opaque = YES;
self.hidden = NO;
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
self.enabled = YES;
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
displayProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
if (!displayProgram.initialized)
{
[displayProgram addAttribute:@"position"];
[displayProgram addAttribute:@"inputTextureCoordinate"];
if (![displayProgram link])
{
NSString *progLog = [displayProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [displayProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [displayProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
displayProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
displayPositionAttribute = [displayProgram attributeIndex:@"position"];
displayTextureCoordinateAttribute = [displayProgram attributeIndex:@"inputTextureCoordinate"];
displayInputTextureUniform = [displayProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputTexture" for the fragment shader
[GPUImageContext setActiveShaderProgram:displayProgram];
glEnableVertexAttribArray(displayPositionAttribute);
glEnableVertexAttribArray(displayTextureCoordinateAttribute);
[self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];
_fillMode = kGPUImageFillModePreserveAspectRatio;
[self createDisplayFramebuffer];
});
}
- (void)layoutSubviews {
[super layoutSubviews];
// The frame buffer needs to be trashed and re-created when the view size changes.
if (!CGSizeEqualToSize(self.bounds.size, boundsSizeAtFrameBufferEpoch) &&
!CGSizeEqualToSize(self.bounds.size, CGSizeZero)) {
runSynchronouslyOnVideoProcessingQueue(^{
[self destroyDisplayFramebuffer];
[self createDisplayFramebuffer];
[self recalculateViewGeometry];
});
}
}
- (void)dealloc
{
runSynchronouslyOnVideoProcessingQueue(^{
[self destroyDisplayFramebuffer];
});
}
#pragma mark -
#pragma mark Managing the display FBOs
- (void)createDisplayFramebuffer;
{
[GPUImageContext useImageProcessingContext];
glGenFramebuffers(1, &displayFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
glGenRenderbuffers(1, &displayRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
[[[GPUImageContext sharedImageProcessingContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
GLint backingWidth, backingHeight;
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
if ( (backingWidth == 0) || (backingHeight == 0) )
{
[self destroyDisplayFramebuffer];
return;
}
_sizeInPixels.width = (CGFloat)backingWidth;
_sizeInPixels.height = (CGFloat)backingHeight;
// NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);
GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation for display of size: %f, %f", self.bounds.size.width, self.bounds.size.height);
boundsSizeAtFrameBufferEpoch = self.bounds.size;
}
- (void)destroyDisplayFramebuffer;
{
[GPUImageContext useImageProcessingContext];
if (displayFramebuffer)
{
glDeleteFramebuffers(1, &displayFramebuffer);
displayFramebuffer = 0;
}
if (displayRenderbuffer)
{
glDeleteRenderbuffers(1, &displayRenderbuffer);
displayRenderbuffer = 0;
}
}
- (void)setDisplayFramebuffer;
{
if (!displayFramebuffer)
{
[self createDisplayFramebuffer];
}
glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height);
}
- (void)presentFramebuffer;
{
glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
[[GPUImageContext sharedImageProcessingContext] presentBufferForDisplay];
}
#pragma mark -
#pragma mark Handling fill mode
- (void)recalculateViewGeometry;
{
runSynchronouslyOnVideoProcessingQueue(^{
CGFloat heightScaling, widthScaling;
CGSize currentViewSize = self.bounds.size;
// CGFloat imageAspectRatio = inputImageSize.width / inputImageSize.height;
// CGFloat viewAspectRatio = currentViewSize.width / currentViewSize.height;
CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.bounds);
switch(_fillMode)
{
case kGPUImageFillModeStretch:
{
widthScaling = 1.0;
heightScaling = 1.0;
}; break;
case kGPUImageFillModePreserveAspectRatio:
{
widthScaling = insetRect.size.width / currentViewSize.width;
heightScaling = insetRect.size.height / currentViewSize.height;
}; break;
case kGPUImageFillModePreserveAspectRatioAndFill:
{
// CGFloat widthHolder = insetRect.size.width / currentViewSize.width;
widthScaling = currentViewSize.height / insetRect.size.height;
heightScaling = currentViewSize.width / insetRect.size.width;
}; break;
}
imageVertices[0] = -widthScaling;
imageVertices[1] = -heightScaling;
imageVertices[2] = widthScaling;
imageVertices[3] = -heightScaling;
imageVertices[4] = -widthScaling;
imageVertices[5] = heightScaling;
imageVertices[6] = widthScaling;
imageVertices[7] = heightScaling;
});
// static const GLfloat imageVertices[] = {
// -1.0f, -1.0f,
// 1.0f, -1.0f,
// -1.0f, 1.0f,
// 1.0f, 1.0f,
// };
}
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
{
backgroundColorRed = redComponent;
backgroundColorGreen = greenComponent;
backgroundColorBlue = blueComponent;
backgroundColorAlpha = alphaComponent;
}
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
{
// static const GLfloat noRotationTextureCoordinates[] = {
// 0.0f, 0.0f,
// 1.0f, 0.0f,
// 0.0f, 1.0f,
// 1.0f, 1.0f,
// };
static const GLfloat noRotationTextureCoordinates[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
static const GLfloat rotateRightTextureCoordinates[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f,
};
static const GLfloat rotateLeftTextureCoordinates[] = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f,
};
static const GLfloat verticalFlipTextureCoordinates[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat horizontalFlipTextureCoordinates[] = {
1.0f, 1.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 0.0f,
};
static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f,
};
static const GLfloat rotate180TextureCoordinates[] = {
1.0f, 0.0f,
0.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
};
switch(rotationMode)
{
case kGPUImageNoRotation: return noRotationTextureCoordinates;
case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;
case kGPUImageRotateRight: return rotateRightTextureCoordinates;
case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;
case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;
case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;
case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;
case kGPUImageRotate180: return rotate180TextureCoordinates;
}
}
#pragma mark -
#pragma mark GPUInput protocol
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext setActiveShaderProgram:displayProgram];
[self setDisplayFramebuffer];
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, [inputFramebufferForDisplay texture]);
glUniform1i(displayInputTextureUniform, 4);
glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices);
glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageView textureCoordinatesForRotation:inputRotation]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[self presentFramebuffer];
[inputFramebufferForDisplay unlock];
inputFramebufferForDisplay = nil;
});
}
- (NSInteger)nextAvailableTextureIndex;
{
return 0;
}
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
inputFramebufferForDisplay = newInputFramebuffer;
[inputFramebufferForDisplay lock];
}
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
inputRotation = newInputRotation;
}
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
runSynchronouslyOnVideoProcessingQueue(^{
CGSize rotatedSize = newSize;
if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
{
rotatedSize.width = newSize.height;
rotatedSize.height = newSize.width;
}
if (!CGSizeEqualToSize(inputImageSize, rotatedSize))
{
inputImageSize = rotatedSize;
[self recalculateViewGeometry];
}
});
}
- (CGSize)maximumOutputSize;
{
if ([self respondsToSelector:@selector(setContentScaleFactor:)])
{
CGSize pointSize = self.bounds.size;
return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height);
}
else
{
return self.bounds.size;
}
}
- (void)endProcessing
{
}
- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
return NO;
}
- (BOOL)wantsMonochromeInput;
{
return NO;
}
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}
#pragma mark -
#pragma mark Accessors
- (CGSize)sizeInPixels;
{
if (CGSizeEqualToSize(_sizeInPixels, CGSizeZero))
{
return [self maximumOutputSize];
}
else
{
return _sizeInPixels;
}
}
- (void)setFillMode:(GPUImageFillModeType)newValue;
{
_fillMode = newValue;
[self recalculateViewGeometry];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImageView.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 3,333 |
```objective-c
//
// GPUImagePicture+TextureSubimage.h
// GPUImage
//
// Created by Jack Wu on 2014-05-28.
//
#import "GPUImagePicture.h"
/// Category that lets a GPUImagePicture's backing texture be partially updated
/// in place (via glTexSubImage2D-style replacement) instead of recreating the
/// whole texture for every change.
@interface GPUImagePicture (TextureSubimage)

/// Replaces the full texture contents with the given UIImage.
- (void)replaceTextureWithSubimage:(UIImage*)subimage;
/// Replaces the full texture contents with the given CGImage.
- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource;

/// Replaces only the region `subRect` of the texture with the given UIImage.
/// NOTE(review): subRect is presumably in pixel coordinates of the target
/// texture — confirm against the implementation before relying on units.
- (void)replaceTextureWithSubimage:(UIImage*)subimage inRect:(CGRect)subRect;
/// Replaces only the region `subRect` of the texture with the given CGImage.
- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource inRect:(CGRect)subRect;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImagePicture+TextureSubimage.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 135 |
```objective-c
#import "GLProgram.h"
#import "GPUImageFramebuffer.h"
#import "GPUImageFramebufferCache.h"
// Evaluates to true for rotation modes that transpose the frame, i.e. where the
// rendered output's width/height are the input's height/width.
#define GPUImageRotationSwapsWidthAndHeight(rotation) ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical || (rotation) == kGPUImageRotateRightFlipHorizontal)

// The eight 2D orientations a texture can carry through the pipeline.
// (Legacy `typedef enum`; NS_ENUM would give stricter type checking, but this
// matches the form used throughout the rest of the framework.)
typedef enum { kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal, kGPUImageRotateRightFlipVertical, kGPUImageRotateRightFlipHorizontal, kGPUImageRotate180 } GPUImageRotationMode;

/// Wraps an EAGLContext together with the serial dispatch queue, shader-program
/// cache, texture cache, and framebuffer cache shared by all rendering on it.
@interface GPUImageContext : NSObject

/// Serial queue on which all OpenGL ES work for this context is performed.
@property(readonly, nonatomic) dispatch_queue_t contextQueue;
/// Shader program currently active on this context's GL state.
@property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram;
/// The underlying OpenGL ES context.
@property(readonly, retain, nonatomic) EAGLContext *context;
/// Core Video texture cache used for fast CVPixelBuffer <-> texture transfer.
@property(readonly) CVOpenGLESTextureCacheRef coreVideoTextureCache;
/// Cache of reusable framebuffers/textures for this context.
@property(readonly) GPUImageFramebufferCache *framebufferCache;

// Key used to associate the shared context with its dispatch queue.
+ (void *)contextKey;
// Singleton accessors for the process-wide image-processing context.
+ (GPUImageContext *)sharedImageProcessingContext;
+ (dispatch_queue_t)sharedContextQueue;
+ (GPUImageFramebufferCache *)sharedFramebufferCache;
// Makes the shared (or this) context current on the calling thread.
+ (void)useImageProcessingContext;
- (void)useAsCurrentContext;
// Activates a shader program, avoiding redundant glUseProgram calls.
+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;
- (void)setContextShaderProgram:(GLProgram *)shaderProgram;
// Hardware capability queries for the current device.
+ (GLint)maximumTextureSizeForThisDevice;
+ (GLint)maximumTextureUnitsForThisDevice;
+ (GLint)maximumVaryingVectorsForThisDevice;
+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;
+ (BOOL)deviceSupportsRedTextures;
+ (BOOL)deviceSupportsFramebufferReads;
// Clamps a requested size to fit within the device's maximum texture size,
// preserving aspect ratio.
+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;

// Presents the currently bound renderbuffer to screen.
- (void)presentBufferForDisplay;
// Returns a cached (or newly compiled) program for the given shader pair.
- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;
// Recreates the EAGLContext in the given sharegroup (must be called before first use).
- (void)useSharegroup:(EAGLSharegroup *)sharegroup;

// Manage fast texture upload
+ (BOOL)supportsFastTextureUpload;

@end

/// Adopted by anything that can consume frames from a GPUImageOutput
/// (filters, views, movie writers, ...).
@protocol GPUImageInput <NSObject>
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
- (NSInteger)nextAvailableTextureIndex;
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
- (CGSize)maximumOutputSize;
- (void)endProcessing;
- (BOOL)shouldIgnoreUpdatesToThisTarget;
- (BOOL)enabled;
- (BOOL)wantsMonochromeInput;
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImageContext.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 619 |
```objective-c
#import <UIKit/UIKit.h>
#import "GPUImageOutput.h"
/// Pipeline source that feeds a still image (UIImage / CGImage / URL contents)
/// into filter chains as an OpenGL ES texture.
@interface GPUImagePicture : GPUImageOutput
{
    // Size, in pixels, of the source image (possibly shrunk to fit the GPU's
    // maximum texture size).
    CGSize pixelSizeOfImage;
    // YES once -processImage has pushed the image to targets at least once.
    BOOL hasProcessedImage;
    // Binary semaphore guarding against overlapping -processImage passes.
    dispatch_semaphore_t imageUpdateSemaphore;
}

// Initialization and teardown

/// Loads the image synchronously from the given URL.
- (id)initWithURL:(NSURL *)url;
- (id)initWithImage:(UIImage *)newImageSource;
- (id)initWithCGImage:(CGImageRef)newImageSource;
/// When smoothlyScaleOutput is YES the texture is padded to power-of-two
/// dimensions and mipmapped for smoother downscaling.
- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;

// Image rendering

/// Pushes the image through all attached targets (fire-and-forget).
- (void)processImage;
/// The pixel size the picture will render at.
- (CGSize)outputImageSize;

/**
 * Process image with all targets and filters asynchronously
 * The completion handler is called after processing finished in the
 * GPU's dispatch queue - and only if this method did not return NO.
 *
 * @returns NO if resource is blocked and processing is discarded, YES otherwise
 */
- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;
/// Renders up to (and including) the given filter, then delivers the resulting
/// UIImage to the block.
- (void)processImageUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage))block;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImagePicture.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 264 |
```objective-c
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageContext.h"
// Fragment shader that swizzles BGRA -> RGBA during the render-to-pixel-buffer pass.
extern NSString *const kGPUImageColorSwizzlingFragmentShaderString;

/// Optional recording lifecycle callbacks.
@protocol GPUImageMovieWriterDelegate <NSObject>

@optional
- (void)movieRecordingCompleted;
- (void)movieRecordingFailedWithError:(NSError*)error;

@end

/// GPUImageInput endpoint that encodes incoming frames (and optionally audio)
/// to a movie file through AVAssetWriter.
@interface GPUImageMovieWriter : NSObject <GPUImageInput>
{
    BOOL alreadyFinishedRecording;

    NSURL *movieURL;
    NSString *fileType;
    AVAssetWriter *assetWriter;
    AVAssetWriterInput *assetWriterAudioInput;
    AVAssetWriterInput *assetWriterVideoInput;
    // Adaptor used to append rendered CVPixelBuffers to the video input.
    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;

    // Dedicated GL context so encoding doesn't contend with the shared one.
    GPUImageContext *_movieWriterContext;
    CVPixelBufferRef renderTarget;
    CVOpenGLESTextureRef renderTexture;

    CGSize videoSize;
    GPUImageRotationMode inputRotation;
}

@property(readwrite, nonatomic) BOOL hasAudioTrack;
@property(readwrite, nonatomic) BOOL shouldPassthroughAudio;
@property(readwrite, nonatomic) BOOL shouldInvalidateAudioSampleWhenDone;
@property(nonatomic, copy) void(^completionBlock)(void);
@property(nonatomic, copy) void(^failureBlock)(NSError*);
// NOTE(review): `assign` delegates can dangle after deallocation; `weak` would
// be safer under ARC — confirm whether MRC compatibility requires `assign` here.
@property(nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate;
// YES when encoding a live source; affects expectsMediaDataInRealTime behavior.
@property(readwrite, nonatomic) BOOL encodingLiveVideo;
@property(nonatomic, copy) BOOL(^videoInputReadyCallback)(void);
@property(nonatomic, copy) BOOL(^audioInputReadyCallback)(void);
// Hook for in-place processing of raw audio samples before they are written.
@property(nonatomic, copy) void(^audioProcessingCallback)(SInt16 **samplesRef, CMItemCount numSamplesInBuffer);
@property(nonatomic) BOOL enabled;
// Exposes the underlying writer for advanced configuration/inspection.
@property(nonatomic, readonly) AVAssetWriter *assetWriter;
// Elapsed recording time of the most recent frame.
@property(nonatomic, readonly) CMTime duration;
// Orientation transform written into the video track.
@property(nonatomic, assign) CGAffineTransform transform;
@property(nonatomic, copy) NSArray *metaData;
// While paused, incoming frames are dropped and timestamps offset on resume.
@property(nonatomic, assign, getter = isPaused) BOOL paused;
@property(nonatomic, retain) GPUImageContext *movieWriterContext;

// Initialization and teardown

/// Designated setup: records to newMovieURL at newSize using the given
/// container type and AVAssetWriter output settings (nil for defaults).
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSDictionary *)outputSettings;

/// Must be called before recording starts if audio is to be captured.
- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;

// Movie recording
- (void)startRecording;
- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
- (void)finishRecording;
- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
/// Aborts recording and discards the output file contents.
- (void)cancelRecording;
- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
// Installs pull-model (requestMediaDataWhenReady) callbacks on the writer inputs.
- (void)enableSynchronizationCallbacks;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImageMovieWriter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 562 |
```objective-c
#import "GPUImagePicture.h"
@implementation GPUImagePicture
#pragma mark -
#pragma mark Initialization and teardown
/// Initializes the picture by synchronously loading image data from `url`.
///
/// Returns nil when the URL's contents cannot be read. Without this guard a
/// nil NSData cascades through -initWithData: into a NULL CGImage, tripping
/// the non-empty-image NSAssert in the designated initializer (and producing
/// undefined Core Graphics behavior in release builds where NSAssert is
/// compiled out).
- (id)initWithURL:(NSURL *)url;
{
    NSData *imageData = [[NSData alloc] initWithContentsOfURL:url];
    if (imageData == nil)
    {
        // Standard Cocoa init-failure convention: return nil, caller handles it.
        return nil;
    }

    if (!(self = [self initWithData:imageData]))
    {
        return nil;
    }

    return self;
}
/// Initializes the picture by decoding `imageData` into a UIImage.
///
/// Returns nil when the data is nil or not a decodable image. Without this
/// guard a nil UIImage yields a NULL CGImage downstream, tripping the
/// non-empty-image NSAssert in the designated initializer (and producing
/// undefined Core Graphics behavior in release builds where NSAssert is
/// compiled out).
- (id)initWithData:(NSData *)imageData;
{
    UIImage *inputImage = [[UIImage alloc] initWithData:imageData];
    if (inputImage == nil)
    {
        // Standard Cocoa init-failure convention: return nil, caller handles it.
        return nil;
    }

    if (!(self = [self initWithImage:inputImage]))
    {
        return nil;
    }

    return self;
}
/// Convenience initializer: wraps a UIImage with mipmap smoothing disabled.
- (id)initWithImage:(UIImage *)newImageSource;
{
    self = [self initWithImage:newImageSource smoothlyScaleOutput:NO];
    return self;
}
/// Convenience initializer: wraps a CGImage with mipmap smoothing disabled.
- (id)initWithCGImage:(CGImageRef)newImageSource;
{
    self = [self initWithCGImage:newImageSource smoothlyScaleOutput:NO];
    return self;
}
/// Unwraps the Quartz image backing the UIImage and defers to the designated
/// CGImage-based initializer.
- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
{
    CGImageRef quartzImage = [newImageSource CGImage];
    return [self initWithCGImage:quartzImage smoothlyScaleOutput:smoothlyScaleOutput];
}
/// Designated initializer. Uploads `newImageSource` to an OpenGL ES texture,
/// either by using the CGImage's backing bytes directly (when its memory layout
/// is GL-compatible) or by redrawing through a Core Graphics bitmap context.
/// When smoothlyScaleOutput is YES the texture is padded to power-of-two
/// dimensions and mipmapped.
- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    hasProcessedImage = NO;
    self.shouldSmoothlyScaleOutput = smoothlyScaleOutput;
    // Create at 0 then signal once: a binary semaphore with value 1, used to
    // serialize -processImage passes against each other.
    imageUpdateSemaphore = dispatch_semaphore_create(0);
    dispatch_semaphore_signal(imageUpdateSemaphore);

    // TODO: Dispatch this whole thing asynchronously to move image loading off main thread
    CGFloat widthOfImage = CGImageGetWidth(newImageSource);
    CGFloat heightOfImage = CGImageGetHeight(newImageSource);

    // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK.
    NSAssert( widthOfImage > 0 && heightOfImage > 0, @"Passed image must not be empty - it should be at least 1px tall and wide");

    pixelSizeOfImage = CGSizeMake(widthOfImage, heightOfImage);
    CGSize pixelSizeToUseForTexture = pixelSizeOfImage;

    BOOL shouldRedrawUsingCoreGraphics = NO;

    // For now, deal with images larger than the maximum texture size by resizing to be within that limit
    CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:pixelSizeOfImage];
    if (!CGSizeEqualToSize(scaledImageSizeToFitOnGPU, pixelSizeOfImage))
    {
        pixelSizeOfImage = scaledImageSizeToFitOnGPU;
        pixelSizeToUseForTexture = pixelSizeOfImage;
        shouldRedrawUsingCoreGraphics = YES;
    }

    if (self.shouldSmoothlyScaleOutput)
    {
        // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill
        CGFloat powerClosestToWidth = ceil(log2(pixelSizeOfImage.width));
        CGFloat powerClosestToHeight = ceil(log2(pixelSizeOfImage.height));

        pixelSizeToUseForTexture = CGSizeMake(pow(2.0, powerClosestToWidth), pow(2.0, powerClosestToHeight));

        shouldRedrawUsingCoreGraphics = YES;
    }

    GLubyte *imageData = NULL;
    CFDataRef dataFromImageDataProvider = NULL;
    GLenum format = GL_BGRA;

    if (!shouldRedrawUsingCoreGraphics) {
        /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to
         * tell GL about the memory layout with GLES.
         */
        if (CGImageGetBytesPerRow(newImageSource) != CGImageGetWidth(newImageSource) * 4 ||
            CGImageGetBitsPerPixel(newImageSource) != 32 ||
            CGImageGetBitsPerComponent(newImageSource) != 8)
        {
            shouldRedrawUsingCoreGraphics = YES;
        } else {
            /* Check that the bitmap pixel format is compatible with GL */
            CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(newImageSource);
            if ((bitmapInfo & kCGBitmapFloatComponents) != 0) {
                /* We don't support float components for use directly in GL */
                shouldRedrawUsingCoreGraphics = YES;
            } else {
                CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
                if (byteOrderInfo == kCGBitmapByteOrder32Little) {
                    /* Little endian, for alpha-first we can use this bitmap directly in GL */
                    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
                    if (alphaInfo != kCGImageAlphaPremultipliedFirst && alphaInfo != kCGImageAlphaFirst &&
                        alphaInfo != kCGImageAlphaNoneSkipFirst) {
                        shouldRedrawUsingCoreGraphics = YES;
                    }
                } else if (byteOrderInfo == kCGBitmapByteOrderDefault || byteOrderInfo == kCGBitmapByteOrder32Big) {
                    /* Big endian, for alpha-last we can use this bitmap directly in GL */
                    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
                    if (alphaInfo != kCGImageAlphaPremultipliedLast && alphaInfo != kCGImageAlphaLast &&
                        alphaInfo != kCGImageAlphaNoneSkipLast) {
                        shouldRedrawUsingCoreGraphics = YES;
                    } else {
                        /* Can access directly using GL_RGBA pixel format */
                        format = GL_RGBA;
                    }
                }
            }
        }
    }

    //    CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent();

    if (shouldRedrawUsingCoreGraphics)
    {
        // For resized or incompatible image: redraw into a fresh BGRA
        // premultiplied-first bitmap that GL can consume directly.
        imageData = (GLubyte *) calloc(1, (int)pixelSizeToUseForTexture.width * (int)pixelSizeToUseForTexture.height * 4);

        CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();

        CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)pixelSizeToUseForTexture.width, (size_t)pixelSizeToUseForTexture.height, 8, (size_t)pixelSizeToUseForTexture.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        //        CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: path_to_url#qa/qa1708/_index.html
        CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, pixelSizeToUseForTexture.width, pixelSizeToUseForTexture.height), newImageSource);
        CGContextRelease(imageContext);
        CGColorSpaceRelease(genericRGBColorspace);
    }
    else
    {
        // Access the raw image bytes directly; the CFData keeps them alive
        // until after the GL upload below.
        dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(newImageSource));
        imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider);
    }

    //    elapsedTime = (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0;
    //    NSLog(@"Core Graphics drawing time: %f", elapsedTime);

    //    CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f;
    //    NSUInteger totalNumberOfPixels = round(pixelSizeToUseForTexture.width * pixelSizeToUseForTexture.height);
    //
    //    for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
    //    {
    //        currentBlueTotal += (CGFloat)imageData[(currentPixel * 4)] / 255.0f;
    //        currentGreenTotal += (CGFloat)imageData[(currentPixel * 4) + 1] / 255.0f;
    //        currentRedTotal += (CGFloat)imageData[(currentPixel * 4 + 2)] / 255.0f;
    //        currentAlphaTotal += (CGFloat)imageData[(currentPixel * 4) + 3] / 255.0f;
    //    }
    //
    //    NSLog(@"Debug, average input image red: %f, green: %f, blue: %f, alpha: %f", currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels);

    // Upload synchronously on the shared GL queue. Reference counting is
    // disabled on the framebuffer so the picture keeps its texture alive for
    // repeated -processImage calls (re-enabled in -dealloc).
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:pixelSizeToUseForTexture onlyTexture:YES];
        [outputFramebuffer disableReferenceCounting];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
        if (self.shouldSmoothlyScaleOutput)
        {
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        }
        // no need to use self.outputTextureOptions here since pictures need this texture formats and type
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)pixelSizeToUseForTexture.width, (int)pixelSizeToUseForTexture.height, 0, format, GL_UNSIGNED_BYTE, imageData);

        if (self.shouldSmoothlyScaleOutput)
        {
            glGenerateMipmap(GL_TEXTURE_2D);
        }
        glBindTexture(GL_TEXTURE_2D, 0);
    });

    // Release the CPU-side copy now that the texture owns the pixels.
    if (shouldRedrawUsingCoreGraphics)
    {
        free(imageData);
    }
    else
    {
        if (dataFromImageDataProvider)
        {
            CFRelease(dataFromImageDataProvider);
        }
    }

    return self;
}
// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
- (void)dealloc;
{
[outputFramebuffer enableReferenceCounting];
[outputFramebuffer unlock];
#if !OS_OBJECT_USE_OBJC
if (imageUpdateSemaphore != NULL)
{
dispatch_release(imageUpdateSemaphore);
}
#endif
}
#pragma mark -
#pragma mark Image rendering
- (void)removeAllTargets;
{
[super removeAllTargets];
hasProcessedImage = NO;
}
- (void)processImage;
{
[self processImageWithCompletionHandler:nil];
}
- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;
{
hasProcessedImage = YES;
// dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_FOREVER);
if (dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_NOW) != 0)
{
return NO;
}
runAsynchronouslyOnVideoProcessingQueue(^{
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setCurrentlyReceivingMonochromeInput:NO];
[currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
[currentTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureIndexOfTarget];
}
dispatch_semaphore_signal(imageUpdateSemaphore);
if (completion != nil) {
completion();
}
});
return YES;
}
- (void)processImageUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage))block;
{
[finalFilterInChain useNextFrameForImageCapture];
[self processImageWithCompletionHandler:^{
UIImage *imageFromFilter = [finalFilterInChain imageFromCurrentFramebuffer];
block(imageFromFilter);
}];
}
- (CGSize)outputImageSize;
{
return pixelSizeOfImage;
}
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
[super addTarget:newTarget atTextureLocation:textureLocation];
if (hasProcessedImage)
{
[newTarget setInputSize:pixelSizeOfImage atIndex:textureLocation];
[newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation];
}
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImagePicture.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,687 |
```objective-c
#import "GPUImageMovieWriter.h"
#import "GPUImageContext.h"
#import "GLProgram.h"
#import "GPUImageFilter.h"
NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
}
);
@interface GPUImageMovieWriter ()
{
GLuint movieFramebuffer, movieRenderbuffer;
GLProgram *colorSwizzlingProgram;
GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;
GLint colorSwizzlingInputTextureUniform;
GPUImageFramebuffer *firstInputFramebuffer;
CMTime startTime, previousFrameTime, previousAudioTime;
dispatch_queue_t audioQueue, videoQueue;
BOOL audioEncodingIsFinished, videoEncodingIsFinished;
BOOL isRecording;
}
// Movie recording
- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings;
// Frame rendering
- (void)createDataFBO;
- (void)destroyDataFBO;
- (void)setFilterFBO;
- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;
@end
@implementation GPUImageMovieWriter
@synthesize hasAudioTrack = _hasAudioTrack;
@synthesize encodingLiveVideo = _encodingLiveVideo;
@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;
@synthesize completionBlock;
@synthesize failureBlock;
@synthesize videoInputReadyCallback;
@synthesize audioInputReadyCallback;
@synthesize enabled;
@synthesize shouldInvalidateAudioSampleWhenDone = _shouldInvalidateAudioSampleWhenDone;
@synthesize paused = _paused;
@synthesize movieWriterContext = _movieWriterContext;
@synthesize delegate = _delegate;
#pragma mark -
#pragma mark Initialization and teardown
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
{
return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];
}
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;
{
if (!(self = [super init]))
{
return nil;
}
_shouldInvalidateAudioSampleWhenDone = NO;
self.enabled = YES;
alreadyFinishedRecording = NO;
videoEncodingIsFinished = NO;
audioEncodingIsFinished = NO;
videoSize = newSize;
movieURL = newMovieURL;
fileType = newFileType;
startTime = kCMTimeInvalid;
_encodingLiveVideo = [[outputSettings objectForKey:@"EncodingLiveVideo"] isKindOfClass:[NSNumber class]] ? [[outputSettings objectForKey:@"EncodingLiveVideo"] boolValue] : YES;
previousFrameTime = kCMTimeNegativeInfinity;
previousAudioTime = kCMTimeNegativeInfinity;
inputRotation = kGPUImageNoRotation;
_movieWriterContext = [[GPUImageContext alloc] init];
[_movieWriterContext useSharegroup:[[[GPUImageContext sharedImageProcessingContext] context] sharegroup]];
runSynchronouslyOnContextQueue(_movieWriterContext, ^{
[_movieWriterContext useAsCurrentContext];
if ([GPUImageContext supportsFastTextureUpload])
{
colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
}
else
{
colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
}
if (!colorSwizzlingProgram.initialized)
{
[colorSwizzlingProgram addAttribute:@"position"];
[colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"];
if (![colorSwizzlingProgram link])
{
NSString *progLog = [colorSwizzlingProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
colorSwizzlingProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"];
colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"];
colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"];
[_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];
glEnableVertexAttribArray(colorSwizzlingPositionAttribute);
glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);
});
[self initializeMovieWithOutputSettings:outputSettings];
return self;
}
- (void)dealloc;
{
[self destroyDataFBO];
#if !OS_OBJECT_USE_OBJC
if( audioQueue != NULL )
{
dispatch_release(audioQueue);
}
if( videoQueue != NULL )
{
dispatch_release(videoQueue);
}
#endif
}
#pragma mark -
#pragma mark Movie recording
- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;
{
isRecording = NO;
self.enabled = YES;
NSError *error = nil;
assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];
if (error != nil)
{
NSLog(@"Error: %@", error);
if (failureBlock)
{
failureBlock(error);
}
else
{
if(self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])
{
[self.delegate movieRecordingFailedWithError:error];
}
}
}
// Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);
// use default output settings if none specified
if (outputSettings == nil)
{
NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];
[settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
[settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];
[settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];
outputSettings = settings;
}
// custom output settings specified
else
{
NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];
NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];
NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];
NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters.");
if( [outputSettings objectForKey:@"EncodingLiveVideo"] ) {
NSMutableDictionary *tmp = [outputSettings mutableCopy];
[tmp removeObjectForKey:@"EncodingLiveVideo"];
outputSettings = tmp;
}
}
/*
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,
[NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init];
[compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];
[compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];
[compressionProperties setObject:[NSNumber numberWithInt: 2000000] forKey:AVVideoAverageBitRateKey];
[compressionProperties setObject:[NSNumber numberWithInt: 16] forKey:AVVideoMaxKeyFrameIntervalKey];
[compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];
[outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];
*/
assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;
// You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
[NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
nil];
// NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
// nil];
assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
[assetWriter addInput:assetWriterVideoInput];
}
- (void)setEncodingLiveVideo:(BOOL) value
{
_encodingLiveVideo = value;
if (isRecording) {
NSAssert(NO, @"Can not change Encoding Live Video while recording");
}
else
{
assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;
assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
}
}
- (void)startRecording;
{
alreadyFinishedRecording = NO;
startTime = kCMTimeInvalid;
runSynchronouslyOnContextQueue(_movieWriterContext, ^{
if (audioInputReadyCallback == NULL)
{
[assetWriter startWriting];
}
});
isRecording = YES;
// [assetWriter startSessionAtSourceTime:kCMTimeZero];
}
- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
{
assetWriterVideoInput.transform = orientationTransform;
[self startRecording];
}
- (void)cancelRecording;
{
if (assetWriter.status == AVAssetWriterStatusCompleted)
{
return;
}
isRecording = NO;
runSynchronouslyOnContextQueue(_movieWriterContext, ^{
alreadyFinishedRecording = YES;
if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished )
{
videoEncodingIsFinished = YES;
[assetWriterVideoInput markAsFinished];
}
if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )
{
audioEncodingIsFinished = YES;
[assetWriterAudioInput markAsFinished];
}
[assetWriter cancelWriting];
});
}
- (void)finishRecording;
{
[self finishRecordingWithCompletionHandler:NULL];
}
- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
{
runSynchronouslyOnContextQueue(_movieWriterContext, ^{
isRecording = NO;
if (assetWriter.status == AVAssetWriterStatusCompleted || assetWriter.status == AVAssetWriterStatusCancelled || assetWriter.status == AVAssetWriterStatusUnknown)
{
if (handler)
runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
return;
}
if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished )
{
videoEncodingIsFinished = YES;
[assetWriterVideoInput markAsFinished];
}
if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )
{
audioEncodingIsFinished = YES;
[assetWriterAudioInput markAsFinished];
}
#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))
// Not iOS 6 SDK
[assetWriter finishWriting];
if (handler)
runAsynchronouslyOnContextQueue(_movieWriterContext,handler);
#else
// iOS 6 SDK
if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {
// Running iOS 6
[assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];
}
else {
// Not running iOS 6
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
[assetWriter finishWriting];
#pragma clang diagnostic pop
if (handler)
runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
}
#endif
});
}
- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
{
if (!isRecording)
{
return;
}
// if (_hasAudioTrack && CMTIME_IS_VALID(startTime))
if (_hasAudioTrack)
{
CFRetain(audioBuffer);
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);
if (CMTIME_IS_INVALID(startTime))
{
runSynchronouslyOnContextQueue(_movieWriterContext, ^{
if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
{
[assetWriter startWriting];
}
[assetWriter startSessionAtSourceTime:currentSampleTime];
startTime = currentSampleTime;
});
}
if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo)
{
NSLog(@"1: Had to drop an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
if (_shouldInvalidateAudioSampleWhenDone)
{
CMSampleBufferInvalidate(audioBuffer);
}
CFRelease(audioBuffer);
return;
}
previousAudioTime = currentSampleTime;
//if the consumer wants to do something with the audio samples before writing, let him.
if (self.audioProcessingCallback) {
//need to introspect into the opaque CMBlockBuffer structure to find its raw sample buffers.
CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(audioBuffer);
CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(audioBuffer);
AudioBufferList audioBufferList;
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(audioBuffer,
NULL,
&audioBufferList,
sizeof(audioBufferList),
NULL,
NULL,
kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
&buffer
);
//passing a live pointer to the audio buffers, try to process them in-place or we might have syncing issues.
for (int bufferCount=0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {
SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;
self.audioProcessingCallback(&samples, numSamplesInBuffer);
}
}
// NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);
void(^write)() = ^() {
while( ! assetWriterAudioInput.readyForMoreMediaData && ! _encodingLiveVideo && ! audioEncodingIsFinished ) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5];
//NSLog(@"audio waiting...");
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if (!assetWriterAudioInput.readyForMoreMediaData)
{
NSLog(@"2: Had to drop an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
}
else if(assetWriter.status == AVAssetWriterStatusWriting)
{
if (![assetWriterAudioInput appendSampleBuffer:audioBuffer])
NSLog(@"Problem appending audio buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
}
else
{
//NSLog(@"Wrote an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
}
if (_shouldInvalidateAudioSampleWhenDone)
{
CMSampleBufferInvalidate(audioBuffer);
}
CFRelease(audioBuffer);
};
// runAsynchronouslyOnContextQueue(_movieWriterContext, write);
if( _encodingLiveVideo )
{
runAsynchronouslyOnContextQueue(_movieWriterContext, write);
}
else
{
write();
}
}
}
- (void)enableSynchronizationCallbacks;
{
if (videoInputReadyCallback != NULL)
{
if( assetWriter.status != AVAssetWriterStatusWriting )
{
[assetWriter startWriting];
}
videoQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.videoReadingQueue", NULL);
[assetWriterVideoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{
if( _paused )
{
//NSLog(@"video requestMediaDataWhenReadyOnQueue paused");
// if we don't sleep, we'll get called back almost immediately, chewing up CPU
usleep(10000);
return;
}
//NSLog(@"video requestMediaDataWhenReadyOnQueue begin");
while( assetWriterVideoInput.readyForMoreMediaData && ! _paused )
{
if( videoInputReadyCallback && ! videoInputReadyCallback() && ! videoEncodingIsFinished )
{
runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished )
{
videoEncodingIsFinished = YES;
[assetWriterVideoInput markAsFinished];
}
});
}
}
//NSLog(@"video requestMediaDataWhenReadyOnQueue end");
}];
}
if (audioInputReadyCallback != NULL)
{
audioQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioReadingQueue", NULL);
[assetWriterAudioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
if( _paused )
{
//NSLog(@"audio requestMediaDataWhenReadyOnQueue paused");
// if we don't sleep, we'll get called back almost immediately, chewing up CPU
usleep(10000);
return;
}
//NSLog(@"audio requestMediaDataWhenReadyOnQueue begin");
while( assetWriterAudioInput.readyForMoreMediaData && ! _paused )
{
if( audioInputReadyCallback && ! audioInputReadyCallback() && ! audioEncodingIsFinished )
{
runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )
{
audioEncodingIsFinished = YES;
[assetWriterAudioInput markAsFinished];
}
});
}
}
//NSLog(@"audio requestMediaDataWhenReadyOnQueue end");
}];
}
}
#pragma mark -
#pragma mark Frame rendering
- (void)createDataFBO;
{
glActiveTexture(GL_TEXTURE1);
glGenFramebuffers(1, &movieFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
if ([GPUImageContext supportsFastTextureUpload])
{
// Code originally sourced from path_to_url
CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget);
/* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion
* regardless of the kCVImageBufferYCbCrMatrixKey value.
* Tagging the resulting video file as BT.601, is the best option right now.
* Creating a proper BT.709 video is not possible at the moment.
*/
CVBufferSetAttachment(renderTarget, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);
CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);
CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);
CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, [_movieWriterContext coreVideoTextureCache], renderTarget,
NULL, // texture attributes
GL_TEXTURE_2D,
GL_RGBA, // opengl format
(int)videoSize.width,
(int)videoSize.height,
GL_BGRA, // native iOS format
GL_UNSIGNED_BYTE,
0,
&renderTexture);
glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
}
else
{
glGenRenderbuffers(1, &movieRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);
}
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}
- (void)destroyDataFBO;
{
runSynchronouslyOnContextQueue(_movieWriterContext, ^{
[_movieWriterContext useAsCurrentContext];
if (movieFramebuffer)
{
glDeleteFramebuffers(1, &movieFramebuffer);
movieFramebuffer = 0;
}
if (movieRenderbuffer)
{
glDeleteRenderbuffers(1, &movieRenderbuffer);
movieRenderbuffer = 0;
}
if ([GPUImageContext supportsFastTextureUpload])
{
if (renderTexture)
{
CFRelease(renderTexture);
}
if (renderTarget)
{
CVPixelBufferRelease(renderTarget);
}
}
});
}
- (void)setFilterFBO;
{
if (!movieFramebuffer)
{
[self createDataFBO];
}
glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);
}
- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;
{
[_movieWriterContext useAsCurrentContext];
[self setFilterFBO];
[_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// This needs to be flipped to write out to video correctly
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
const GLfloat *textureCoordinates = [GPUImageFilter textureCoordinatesForRotation:inputRotation];
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, [inputFramebufferToUse texture]);
glUniform1i(colorSwizzlingInputTextureUniform, 4);
// NSLog(@"Movie writer framebuffer: %@", inputFramebufferToUse);
glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glFinish();
}
#pragma mark -
#pragma mark GPUImageInput protocol
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
if (!isRecording)
{
[firstInputFramebuffer unlock];
return;
}
// Drop frames forced by images and other things with no time constants
// Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case
if ( (CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)) )
{
[firstInputFramebuffer unlock];
return;
}
if (CMTIME_IS_INVALID(startTime))
{
runSynchronouslyOnContextQueue(_movieWriterContext, ^{
if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
{
[assetWriter startWriting];
}
[assetWriter startSessionAtSourceTime:frameTime];
startTime = frameTime;
});
}
GPUImageFramebuffer *inputFramebufferForBlock = firstInputFramebuffer;
glFinish();
runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
if (!assetWriterVideoInput.readyForMoreMediaData && _encodingLiveVideo)
{
[inputFramebufferForBlock unlock];
NSLog(@"1: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
return;
}
// Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
[_movieWriterContext useAsCurrentContext];
[self renderAtInternalSizeUsingFramebuffer:inputFramebufferForBlock];
CVPixelBufferRef pixel_buffer = NULL;
if ([GPUImageContext supportsFastTextureUpload])
{
pixel_buffer = renderTarget;
CVPixelBufferLockBaseAddress(pixel_buffer, 0);
}
else
{
CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);
if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))
{
CVPixelBufferRelease(pixel_buffer);
return;
}
else
{
CVPixelBufferLockBaseAddress(pixel_buffer, 0);
GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);
glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);
}
}
void(^write)() = ^() {
while( ! assetWriterVideoInput.readyForMoreMediaData && ! _encodingLiveVideo && ! videoEncodingIsFinished ) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
// NSLog(@"video waiting...");
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if (!assetWriterVideoInput.readyForMoreMediaData)
{
NSLog(@"2: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
}
else if(self.assetWriter.status == AVAssetWriterStatusWriting)
{
if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])
NSLog(@"Problem appending pixel buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
}
else
{
NSLog(@"Couldn't write a frame");
//NSLog(@"Wrote a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
}
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
previousFrameTime = frameTime;
if (![GPUImageContext supportsFastTextureUpload])
{
CVPixelBufferRelease(pixel_buffer);
}
};
write();
[inputFramebufferForBlock unlock];
});
}
- (NSInteger)nextAvailableTextureIndex;
{
return 0;
}
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
[newInputFramebuffer lock];
// runSynchronouslyOnContextQueue(_movieWriterContext, ^{
firstInputFramebuffer = newInputFramebuffer;
// });
}
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
inputRotation = newInputRotation;
}
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}
- (CGSize)maximumOutputSize;
{
return videoSize;
}
- (void)endProcessing
{
if (completionBlock)
{
if (!alreadyFinishedRecording)
{
alreadyFinishedRecording = YES;
completionBlock();
}
}
else
{
if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])
{
[_delegate movieRecordingCompleted];
}
}
}
- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
return NO;
}
- (BOOL)wantsMonochromeInput;
{
return NO;
}
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}
#pragma mark -
#pragma mark Accessors
- (void)setHasAudioTrack:(BOOL)newValue
{
[self setHasAudioTrack:newValue audioSettings:nil];
}
- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;
{
_hasAudioTrack = newValue;
if (_hasAudioTrack)
{
if (_shouldPassthroughAudio)
{
// Do not set any settings so audio will be the same as passthrough
audioOutputSettings = nil;
}
else if (audioOutputSettings == nil)
{
AVAudioSession *sharedAudioSession = [AVAudioSession sharedInstance];
double preferredHardwareSampleRate;
if ([sharedAudioSession respondsToSelector:@selector(sampleRate)])
{
preferredHardwareSampleRate = [sharedAudioSession sampleRate];
}
else
{
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
#pragma clang diagnostic pop
}
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: preferredHardwareSampleRate ], AVSampleRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
//[ NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
nil];
/*
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];*/
}
assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
[assetWriter addInput:assetWriterAudioInput];
assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
}
else
{
// Remove audio track if it exists
}
}
- (NSArray*)metaData {
return assetWriter.metadata;
}
- (void)setMetaData:(NSArray*)metaData {
assetWriter.metadata = metaData;
}
- (CMTime)duration {
if( ! CMTIME_IS_VALID(startTime) )
return kCMTimeZero;
if( ! CMTIME_IS_NEGATIVE_INFINITY(previousFrameTime) )
return CMTimeSubtract(previousFrameTime, startTime);
if( ! CMTIME_IS_NEGATIVE_INFINITY(previousAudioTime) )
return CMTimeSubtract(previousAudioTime, startTime);
return kCMTimeZero;
}
- (CGAffineTransform)transform {
return assetWriterVideoInput.transform;
}
- (void)setTransform:(CGAffineTransform)transform {
assetWriterVideoInput.transform = transform;
}
- (AVAssetWriter*)assetWriter {
return assetWriter;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImageMovieWriter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 7,149 |
```objective-c
#import "GPUImageContext.h"
#import <OpenGLES/EAGLDrawable.h>
#import <AVFoundation/AVFoundation.h>
#define MAXSHADERPROGRAMSALLOWEDINCACHE 40
// Class extension: private state for the OpenGL ES context wrapper.
@interface GPUImageContext()
{
    // Compiled GLProgram objects keyed by their shader source, so a
    // vertex/fragment pair is only compiled and linked once per context.
    NSMutableDictionary *shaderProgramCache;
    // Intended LRU bookkeeping for the program cache (populated but the
    // eviction policy is not visible in this file).
    NSMutableArray *shaderProgramUsageHistory;
    // Sharegroup to adopt when creating the EAGLContext, allowing texture
    // sharing with another context (e.g. the movie writer's).
    EAGLSharegroup *_sharegroup;
}
@end
@implementation GPUImageContext

// Explicit @synthesize kept for properties that may also have custom lazy
// getters elsewhere in this file (e.g. context, coreVideoTextureCache).
@synthesize context = _context;
@synthesize currentShaderProgram = _currentShaderProgram;
@synthesize contextQueue = _contextQueue;
@synthesize coreVideoTextureCache = _coreVideoTextureCache;
@synthesize framebufferCache = _framebufferCache;

// Queue-specific key; its value is its own address, so only pointer identity
// matters. Used to detect "already on the GPUImage context queue".
static void *openGLESContextQueueKey;
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
openGLESContextQueueKey = &openGLESContextQueueKey;
_contextQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.openGLESContextQueue", NULL);
#if OS_OBJECT_USE_OBJC
dispatch_queue_set_specific(_contextQueue, openGLESContextQueueKey, (__bridge void *)self, NULL);
#endif
shaderProgramCache = [[NSMutableDictionary alloc] init];
shaderProgramUsageHistory = [[NSMutableArray alloc] init];
return self;
}
+ (void *)contextKey {
return openGLESContextQueueKey;
}
// Based on Colin Wheeler's example here: path_to_url
+ (GPUImageContext *)sharedImageProcessingContext;
{
static dispatch_once_t pred;
static GPUImageContext *sharedImageProcessingContext = nil;
dispatch_once(&pred, ^{
sharedImageProcessingContext = [[[self class] alloc] init];
});
return sharedImageProcessingContext;
}
+ (dispatch_queue_t)sharedContextQueue;
{
return [[self sharedImageProcessingContext] contextQueue];
}
+ (GPUImageFramebufferCache *)sharedFramebufferCache;
{
return [[self sharedImageProcessingContext] framebufferCache];
}
+ (void)useImageProcessingContext;
{
[[GPUImageContext sharedImageProcessingContext] useAsCurrentContext];
}
- (void)useAsCurrentContext;
{
EAGLContext *imageProcessingContext = [self context];
if ([EAGLContext currentContext] != imageProcessingContext)
{
[EAGLContext setCurrentContext:imageProcessingContext];
}
}
+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;
{
GPUImageContext *sharedContext = [GPUImageContext sharedImageProcessingContext];
[sharedContext setContextShaderProgram:shaderProgram];
}
- (void)setContextShaderProgram:(GLProgram *)shaderProgram;
{
EAGLContext *imageProcessingContext = [self context];
if ([EAGLContext currentContext] != imageProcessingContext)
{
[EAGLContext setCurrentContext:imageProcessingContext];
}
if (self.currentShaderProgram != shaderProgram)
{
self.currentShaderProgram = shaderProgram;
[shaderProgram use];
}
}
+ (GLint)maximumTextureSizeForThisDevice;
{
static dispatch_once_t pred;
static GLint maxTextureSize = 0;
dispatch_once(&pred, ^{
[self useImageProcessingContext];
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
});
return maxTextureSize;
}
+ (GLint)maximumTextureUnitsForThisDevice;
{
static dispatch_once_t pred;
static GLint maxTextureUnits = 0;
dispatch_once(&pred, ^{
[self useImageProcessingContext];
glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxTextureUnits);
});
return maxTextureUnits;
}
+ (GLint)maximumVaryingVectorsForThisDevice;
{
static dispatch_once_t pred;
static GLint maxVaryingVectors = 0;
dispatch_once(&pred, ^{
[self useImageProcessingContext];
glGetIntegerv(GL_MAX_VARYING_VECTORS, &maxVaryingVectors);
});
return maxVaryingVectors;
}
+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;
{
static dispatch_once_t pred;
static NSArray *extensionNames = nil;
// Cache extensions for later quick reference, since this won't change for a given device
dispatch_once(&pred, ^{
[GPUImageContext useImageProcessingContext];
NSString *extensionsString = [NSString stringWithCString:(const char *)glGetString(GL_EXTENSIONS) encoding:NSASCIIStringEncoding];
extensionNames = [extensionsString componentsSeparatedByString:@" "];
});
return [extensionNames containsObject:extension];
}
// path_to_url
+ (BOOL)deviceSupportsRedTextures;
{
static dispatch_once_t pred;
static BOOL supportsRedTextures = NO;
dispatch_once(&pred, ^{
supportsRedTextures = [GPUImageContext deviceSupportsOpenGLESExtension:@"GL_EXT_texture_rg"];
});
return supportsRedTextures;
}
+ (BOOL)deviceSupportsFramebufferReads;
{
static dispatch_once_t pred;
static BOOL supportsFramebufferReads = NO;
dispatch_once(&pred, ^{
supportsFramebufferReads = [GPUImageContext deviceSupportsOpenGLESExtension:@"GL_EXT_shader_framebuffer_fetch"];
});
return supportsFramebufferReads;
}
+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;
{
GLint maxTextureSize = [self maximumTextureSizeForThisDevice];
if ( (inputSize.width < maxTextureSize) && (inputSize.height < maxTextureSize) )
{
return inputSize;
}
CGSize adjustedSize;
if (inputSize.width > inputSize.height)
{
adjustedSize.width = (CGFloat)maxTextureSize;
adjustedSize.height = ((CGFloat)maxTextureSize / inputSize.width) * inputSize.height;
}
else
{
adjustedSize.height = (CGFloat)maxTextureSize;
adjustedSize.width = ((CGFloat)maxTextureSize / inputSize.height) * inputSize.width;
}
return adjustedSize;
}
- (void)presentBufferForDisplay;
{
[self.context presentRenderbuffer:GL_RENDERBUFFER];
}
- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;
{
NSString *lookupKeyForShaderProgram = [NSString stringWithFormat:@"V: %@ - F: %@", vertexShaderString, fragmentShaderString];
GLProgram *programFromCache = [shaderProgramCache objectForKey:lookupKeyForShaderProgram];
if (programFromCache == nil)
{
programFromCache = [[GLProgram alloc] initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];
[shaderProgramCache setObject:programFromCache forKey:lookupKeyForShaderProgram];
// [shaderProgramUsageHistory addObject:lookupKeyForShaderProgram];
// if ([shaderProgramUsageHistory count] >= MAXSHADERPROGRAMSALLOWEDINCACHE)
// {
// for (NSUInteger currentShaderProgramRemovedFromCache = 0; currentShaderProgramRemovedFromCache < 10; currentShaderProgramRemovedFromCache++)
// {
// NSString *shaderProgramToRemoveFromCache = [shaderProgramUsageHistory objectAtIndex:0];
// [shaderProgramUsageHistory removeObjectAtIndex:0];
// [shaderProgramCache removeObjectForKey:shaderProgramToRemoveFromCache];
// }
// }
}
return programFromCache;
}
- (void)useSharegroup:(EAGLSharegroup *)sharegroup;
{
NSAssert(_context == nil, @"Unable to use a share group when the context has already been created. Call this method before you use the context for the first time.");
_sharegroup = sharegroup;
}
- (EAGLContext *)createContext;
{
EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:_sharegroup];
NSAssert(context != nil, @"Unable to create an OpenGL ES 2.0 context. The GPUImage framework requires OpenGL ES 2.0 support to work.");
return context;
}
#pragma mark -
#pragma mark Manage fast texture upload
+ (BOOL)supportsFastTextureUpload;
{
#if TARGET_IPHONE_SIMULATOR
return NO;
#else
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wtautological-pointer-compare"
return (CVOpenGLESTextureCacheCreate != NULL);
#pragma clang diagnostic pop
#endif
}
#pragma mark -
#pragma mark Accessors
- (EAGLContext *)context;
{
if (_context == nil)
{
_context = [self createContext];
[EAGLContext setCurrentContext:_context];
// Set up a few global settings for the image processing pipeline
glDisable(GL_DEPTH_TEST);
}
return _context;
}
- (CVOpenGLESTextureCacheRef)coreVideoTextureCache;
{
if (_coreVideoTextureCache == NULL)
{
#if defined(__IPHONE_6_0)
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [self context], NULL, &_coreVideoTextureCache);
#else
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[self context], NULL, &_coreVideoTextureCache);
#endif
if (err)
{
NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
}
}
return _coreVideoTextureCache;
}
- (GPUImageFramebufferCache *)framebufferCache;
{
if (_framebufferCache == nil)
{
_framebufferCache = [[GPUImageFramebufferCache alloc] init];
}
return _framebufferCache;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImageContext.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,056 |
```objective-c
//
// GPUImagePicture+TextureSubimage.m
// GPUImage
//
// Created by Jack Wu on 2014-05-28.
//
#import "GPUImagePicture+TextureSubimage.h"
// Category that lets callers update a sub-rectangle of an already-initialized
// GPUImagePicture's texture via glTexSubImage2D, without re-creating the texture.
@implementation GPUImagePicture (TextureSubimage)

// Replaces the full texture area covered by the image, anchored at the origin.
- (void)replaceTextureWithSubimage:(UIImage*)subimage {
    return [self replaceTextureWithSubCGImage:[subimage CGImage]];
}

// CGImage variant: the target rect is the image's own bounds at the origin.
- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource {
    CGRect rect = (CGRect) {.origin = CGPointZero, .size = (CGSize){.width = CGImageGetWidth(subimageSource), .height = CGImageGetHeight(subimageSource)}};
    return [self replaceTextureWithSubCGImage:subimageSource inRect:rect];
}

// UIImage variant targeting an explicit sub-rect (must match the image's size).
- (void)replaceTextureWithSubimage:(UIImage*)subimage inRect:(CGRect)subRect {
    return [self replaceTextureWithSubCGImage:[subimage CGImage] inRect:subRect];
}

// Core implementation: extracts RGBA bytes from the CGImage (redrawing through
// Core Graphics only when the source layout is incompatible) and uploads them
// into subRect of the existing texture.
- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource inRect:(CGRect)subRect {
    NSAssert(outputFramebuffer, @"Picture must be initialized first before replacing subtexture");
    NSAssert(self.framebufferForOutput.textureOptions.internalFormat == GL_RGBA, @"For replacing subtexture the internal texture format must be GL_RGBA.");

    CGRect subimageRect = (CGRect){.origin = CGPointZero, .size = (CGSize){.width = CGImageGetWidth(subimageSource), .height = CGImageGetHeight(subimageSource)}};
    NSAssert(!CGRectIsEmpty(subimageRect), @"Passed sub image must not be empty - it should be at least 1px tall and wide");
    NSAssert(!CGRectIsEmpty(subRect), @"Passed sub rect must not be empty");
    NSAssert(CGSizeEqualToSize(subimageRect.size, subRect.size), @"Subimage size must match the size of sub rect");

    // We don't have to worry about scaling the subimage or finding a power of two size.
    // The initialization has taken care of that for us.

    // NOTE(review): this signals the picture's update semaphore without a
    // matching wait, presumably to unblock a pending processImage — confirm
    // against GPUImagePicture's semaphore usage before changing.
    dispatch_semaphore_signal(imageUpdateSemaphore);

    BOOL shouldRedrawUsingCoreGraphics = NO;

    // Since internal format is always RGBA, we need the input data in RGBA as well.
    // Accept only 32-bit-big/default byte order with alpha last; anything else
    // is redrawn into a known-good RGBA bitmap context.
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(subimageSource);
    CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
    if (byteOrderInfo != kCGBitmapByteOrderDefault && byteOrderInfo != kCGBitmapByteOrder32Big) {
        shouldRedrawUsingCoreGraphics = YES;
    }
    else {
        CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
        if (alphaInfo != kCGImageAlphaPremultipliedLast && alphaInfo != kCGImageAlphaLast && alphaInfo != kCGImageAlphaNoneSkipLast) {
            shouldRedrawUsingCoreGraphics = YES;
        }
    }

    GLubyte *imageData = NULL;
    CFDataRef dataFromImageDataProvider;  // only set (and later released) in the direct-access path
    if (shouldRedrawUsingCoreGraphics)
    {
        // For resized or incompatible image: redraw
        imageData = (GLubyte *) calloc(1, (int)subimageRect.size.width * (int)subimageRect.size.height * 4);

        CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();

        CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)subimageRect.size.width, (size_t)subimageRect.size.height, 8, (size_t)subimageRect.size.width * 4, genericRGBColorspace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast);
        CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, subimageRect.size.width, subimageRect.size.height), subimageSource);
        CGContextRelease(imageContext);
        CGColorSpaceRelease(genericRGBColorspace);
    }
    else
    {
        // Access the raw image bytes directly
        dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(subimageSource));
        imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider);
    }

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        [outputFramebuffer disableReferenceCounting];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

        // no need to use self.outputTextureOptions here since pictures need this texture formats and type
        // NOTE(review): height lacks the (GLint) cast that width has — harmless
        // implicit conversion, but inconsistent.
        glTexSubImage2D(GL_TEXTURE_2D, 0, subRect.origin.x, subRect.origin.y, (GLint)subRect.size.width, subRect.size.height, GL_RGBA, GL_UNSIGNED_BYTE, imageData);

        if (self.shouldSmoothlyScaleOutput)
        {
            glGenerateMipmap(GL_TEXTURE_2D);
        }
        glBindTexture(GL_TEXTURE_2D, 0);
    });

    // Release whichever buffer the chosen path produced.
    if (shouldRedrawUsingCoreGraphics)
    {
        free(imageData);
    }
    else
    {
        CFRelease(dataFromImageDataProvider);
    }
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/GPUImagePicture+TextureSubimage.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,088 |
```objective-c
#import <UIKit/UIKit.h>
//! Project version number for GPUImageFramework.
FOUNDATION_EXPORT double GPUImageFrameworkVersionNumber;
//! Project version string for GPUImageFramework.
FOUNDATION_EXPORT const unsigned char GPUImageFrameworkVersionString[];
#import <GPUImage/GLProgram.h>
// Base classes
#import <GPUImage/GPUImageContext.h>
#import <GPUImage/GPUImageOutput.h>
#import <GPUImage/GPUImageView.h>
#import <GPUImage/GPUImageVideoCamera.h>
#import <GPUImage/GPUImageStillCamera.h>
#import <GPUImage/GPUImageMovie.h>
#import <GPUImage/GPUImagePicture.h>
#import <GPUImage/GPUImageRawDataInput.h>
#import <GPUImage/GPUImageRawDataOutput.h>
#import <GPUImage/GPUImageMovieWriter.h>
#import <GPUImage/GPUImageFilterPipeline.h>
#import <GPUImage/GPUImageTextureOutput.h>
#import <GPUImage/GPUImageFilterGroup.h>
#import <GPUImage/GPUImageTextureInput.h>
#import <GPUImage/GPUImageUIElement.h>
#import <GPUImage/GPUImageBuffer.h>
#import <GPUImage/GPUImageFramebuffer.h>
#import <GPUImage/GPUImageFramebufferCache.h>
// Filters
#import <GPUImage/GPUImageFilter.h>
#import <GPUImage/GPUImageTwoInputFilter.h>
#import <GPUImage/GPUImagePixellateFilter.h>
#import <GPUImage/GPUImagePixellatePositionFilter.h>
#import <GPUImage/GPUImageSepiaFilter.h>
#import <GPUImage/GPUImageColorInvertFilter.h>
#import <GPUImage/GPUImageSaturationFilter.h>
#import <GPUImage/GPUImageContrastFilter.h>
#import <GPUImage/GPUImageExposureFilter.h>
#import <GPUImage/GPUImageBrightnessFilter.h>
#import <GPUImage/GPUImageLevelsFilter.h>
#import <GPUImage/GPUImageSharpenFilter.h>
#import <GPUImage/GPUImageGammaFilter.h>
#import <GPUImage/GPUImageSobelEdgeDetectionFilter.h>
#import <GPUImage/GPUImageSketchFilter.h>
#import <GPUImage/GPUImageToonFilter.h>
#import <GPUImage/GPUImageSmoothToonFilter.h>
#import <GPUImage/GPUImageMultiplyBlendFilter.h>
#import <GPUImage/GPUImageDissolveBlendFilter.h>
#import <GPUImage/GPUImageKuwaharaFilter.h>
#import <GPUImage/GPUImageKuwaharaRadius3Filter.h>
#import <GPUImage/GPUImageVignetteFilter.h>
#import <GPUImage/GPUImageGaussianBlurFilter.h>
#import <GPUImage/GPUImageGaussianBlurPositionFilter.h>
#import <GPUImage/GPUImageGaussianSelectiveBlurFilter.h>
#import <GPUImage/GPUImageOverlayBlendFilter.h>
#import <GPUImage/GPUImageDarkenBlendFilter.h>
#import <GPUImage/GPUImageLightenBlendFilter.h>
#import <GPUImage/GPUImageSwirlFilter.h>
#import <GPUImage/GPUImageSourceOverBlendFilter.h>
#import <GPUImage/GPUImageColorBurnBlendFilter.h>
#import <GPUImage/GPUImageColorDodgeBlendFilter.h>
#import <GPUImage/GPUImageScreenBlendFilter.h>
#import <GPUImage/GPUImageExclusionBlendFilter.h>
#import <GPUImage/GPUImageDifferenceBlendFilter.h>
#import <GPUImage/GPUImageSubtractBlendFilter.h>
#import <GPUImage/GPUImageHardLightBlendFilter.h>
#import <GPUImage/GPUImageSoftLightBlendFilter.h>
#import <GPUImage/GPUImageColorBlendFilter.h>
#import <GPUImage/GPUImageHueBlendFilter.h>
#import <GPUImage/GPUImageSaturationBlendFilter.h>
#import <GPUImage/GPUImageLuminosityBlendFilter.h>
#import <GPUImage/GPUImageCropFilter.h>
#import <GPUImage/GPUImageGrayscaleFilter.h>
#import <GPUImage/GPUImageTransformFilter.h>
#import <GPUImage/GPUImageChromaKeyBlendFilter.h>
#import <GPUImage/GPUImageHazeFilter.h>
#import <GPUImage/GPUImageLuminanceThresholdFilter.h>
#import <GPUImage/GPUImagePosterizeFilter.h>
#import <GPUImage/GPUImageBoxBlurFilter.h>
#import <GPUImage/GPUImageAdaptiveThresholdFilter.h>
#import <GPUImage/GPUImageUnsharpMaskFilter.h>
#import <GPUImage/GPUImageBulgeDistortionFilter.h>
#import <GPUImage/GPUImagePinchDistortionFilter.h>
#import <GPUImage/GPUImageCrosshatchFilter.h>
#import <GPUImage/GPUImageCGAColorspaceFilter.h>
#import <GPUImage/GPUImagePolarPixellateFilter.h>
#import <GPUImage/GPUImageStretchDistortionFilter.h>
#import <GPUImage/GPUImagePerlinNoiseFilter.h>
#import <GPUImage/GPUImageJFAVoronoiFilter.h>
#import <GPUImage/GPUImageVoronoiConsumerFilter.h>
#import <GPUImage/GPUImageMosaicFilter.h>
#import <GPUImage/GPUImageTiltShiftFilter.h>
#import <GPUImage/GPUImage3x3ConvolutionFilter.h>
#import <GPUImage/GPUImageEmbossFilter.h>
#import <GPUImage/GPUImageCannyEdgeDetectionFilter.h>
#import <GPUImage/GPUImageThresholdEdgeDetectionFilter.h>
#import <GPUImage/GPUImageMaskFilter.h>
#import <GPUImage/GPUImageHistogramFilter.h>
#import <GPUImage/GPUImageHistogramGenerator.h>
#import <GPUImage/GPUImagePrewittEdgeDetectionFilter.h>
#import <GPUImage/GPUImageXYDerivativeFilter.h>
#import <GPUImage/GPUImageHarrisCornerDetectionFilter.h>
#import <GPUImage/GPUImageAlphaBlendFilter.h>
#import <GPUImage/GPUImageNormalBlendFilter.h>
#import <GPUImage/GPUImageNonMaximumSuppressionFilter.h>
#import <GPUImage/GPUImageRGBFilter.h>
#import <GPUImage/GPUImageMedianFilter.h>
#import <GPUImage/GPUImageBilateralFilter.h>
#import <GPUImage/GPUImageCrosshairGenerator.h>
#import <GPUImage/GPUImageToneCurveFilter.h>
#import <GPUImage/GPUImageNobleCornerDetectionFilter.h>
#import <GPUImage/GPUImageShiTomasiFeatureDetectionFilter.h>
#import <GPUImage/GPUImageErosionFilter.h>
#import <GPUImage/GPUImageRGBErosionFilter.h>
#import <GPUImage/GPUImageDilationFilter.h>
#import <GPUImage/GPUImageRGBDilationFilter.h>
#import <GPUImage/GPUImageOpeningFilter.h>
#import <GPUImage/GPUImageRGBOpeningFilter.h>
#import <GPUImage/GPUImageClosingFilter.h>
#import <GPUImage/GPUImageRGBClosingFilter.h>
#import <GPUImage/GPUImageColorPackingFilter.h>
#import <GPUImage/GPUImageSphereRefractionFilter.h>
#import <GPUImage/GPUImageMonochromeFilter.h>
#import <GPUImage/GPUImageOpacityFilter.h>
#import <GPUImage/GPUImageHighlightShadowFilter.h>
#import <GPUImage/GPUImageFalseColorFilter.h>
#import <GPUImage/GPUImageHSBFilter.h>
#import <GPUImage/GPUImageHueFilter.h>
#import <GPUImage/GPUImageGlassSphereFilter.h>
#import <GPUImage/GPUImageLookupFilter.h>
#import <GPUImage/GPUImageAmatorkaFilter.h>
#import <GPUImage/GPUImageMissEtikateFilter.h>
#import <GPUImage/GPUImageSoftEleganceFilter.h>
#import <GPUImage/GPUImageAddBlendFilter.h>
#import <GPUImage/GPUImageDivideBlendFilter.h>
#import <GPUImage/GPUImagePolkaDotFilter.h>
#import <GPUImage/GPUImageLocalBinaryPatternFilter.h>
#import <GPUImage/GPUImageLanczosResamplingFilter.h>
#import <GPUImage/GPUImageAverageColor.h>
#import <GPUImage/GPUImageSolidColorGenerator.h>
#import <GPUImage/GPUImageLuminosity.h>
#import <GPUImage/GPUImageAverageLuminanceThresholdFilter.h>
#import <GPUImage/GPUImageWhiteBalanceFilter.h>
#import <GPUImage/GPUImageChromaKeyFilter.h>
#import <GPUImage/GPUImageLowPassFilter.h>
#import <GPUImage/GPUImageHighPassFilter.h>
#import <GPUImage/GPUImageMotionDetector.h>
#import <GPUImage/GPUImageHalftoneFilter.h>
#import <GPUImage/GPUImageThresholdedNonMaximumSuppressionFilter.h>
#import <GPUImage/GPUImageHoughTransformLineDetector.h>
#import <GPUImage/GPUImageParallelCoordinateLineTransformFilter.h>
#import <GPUImage/GPUImageThresholdSketchFilter.h>
#import <GPUImage/GPUImageLineGenerator.h>
#import <GPUImage/GPUImageLinearBurnBlendFilter.h>
#import <GPUImage/GPUImageGaussianBlurPositionFilter.h>
#import <GPUImage/GPUImagePixellatePositionFilter.h>
#import <GPUImage/GPUImageTwoInputCrossTextureSamplingFilter.h>
#import <GPUImage/GPUImagePoissonBlendFilter.h>
#import <GPUImage/GPUImageMotionBlurFilter.h>
#import <GPUImage/GPUImageZoomBlurFilter.h>
#import <GPUImage/GPUImageLaplacianFilter.h>
#import <GPUImage/GPUImageiOSBlurFilter.h>
#import <GPUImage/GPUImageLuminanceRangeFilter.h>
#import <GPUImage/GPUImageDirectionalNonMaximumSuppressionFilter.h>
#import <GPUImage/GPUImageDirectionalSobelEdgeDetectionFilter.h>
#import <GPUImage/GPUImageSingleComponentGaussianBlurFilter.h>
#import <GPUImage/GPUImageThreeInputFilter.h>
#import <GPUImage/GPUImageWeakPixelInclusionFilter.h>
#import <GPUImage/GPUImageFASTCornerDetectionFilter.h>
#import <GPUImage/GPUImageMovieComposition.h>
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/iOS/Framework/GPUImageFramework.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,064 |
```objective-c
/*!
 @header WMPlayer.h
 @abstract GitHub: path_to_url
 CSDN: path_to_url
 @author Created by zhengwenming on 16/1/24
 @version 1.00 16/1/24 Creation
 */
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>
#import <AVKit/AVKit.h>
#import "WMPlayerModel.h"
#import "FastForwardView.h"
#import "WMLightView.h"
//***********************************************************
//
// Playback state of the player.
typedef NS_ENUM(NSInteger, WMPlayerState) {
    WMPlayerStateFailed,    // playback failed (bad item, network error, ...)
    WMPlayerStateBuffering, // buffering
    WMPlayerStatePlaying,   // playing
    WMPlayerStateStopped,   // stopped
    WMPlayerStateFinished,  // playback reached the end of the item
    WMPlayerStatePause,     // paused
};
// Presentation state of the player view.
typedef NS_ENUM(NSUInteger, WMPlayerViewState) {
    PlayerViewStateSmall,      // embedded (small-screen) layout
    PlayerViewStateFullScreen, // full-screen layout
    PlayerViewStateAnimating,  // mid-transition between the two
};
// playerLayer
typedef NS_ENUM(NSInteger, WMPlayerLayerGravity) {
WMPlayerLayerGravityResize, //
WMPlayerLayerGravityResizeAspect, //
WMPlayerLayerGravityResizeAspectFill //
};
//
// Style of the player's top-left back button.
typedef NS_ENUM(NSInteger, BackBtnStyle){
    BackBtnStyleNone, // no back button
    BackBtnStyleClose,// "X" close button
    BackBtnStylePop   // "<-" pop-navigation arrow
};
//
// Which control a pan gesture is currently adjusting.
typedef NS_ENUM(NSUInteger,WMControlType) {
    WMControlTypeDefault, // none / not yet determined
    WMControlTypeProgress,// playback progress
    WMControlTypeVoice,   // volume
    WMControlTypeLight    // screen brightness
} ;
@class WMPlayer;
// Delegate callbacks for player UI events and playback status changes.
// The first argument of every method is the sending player.
@protocol WMPlayerDelegate <NSObject>
@required
// The close/back button was tapped.
-(void)wmplayer:(WMPlayer *)wmplayer clickedCloseButton:(UIButton *)backBtn;
// The full-screen toggle button was tapped.
-(void)wmplayer:(WMPlayer *)wmplayer clickedFullScreenButton:(UIButton *)fullScreenBtn;
@optional
// The play/pause button was tapped.
-(void)wmplayer:(WMPlayer *)wmplayer clickedPlayOrPauseButton:(UIButton *)playOrPauseBtn;
// The screen-lock button was tapped.
-(void)wmplayer:(WMPlayer *)wmplayer clickedLockButton:(UIButton *)lockBtn;
// The player view received a single tap.
-(void)wmplayer:(WMPlayer *)wmplayer singleTaped:(UITapGestureRecognizer *)singleTap;
// The player view received a double tap.
-(void)wmplayer:(WMPlayer *)wmplayer doubleTaped:(UITapGestureRecognizer *)doubleTap;
// The top/bottom control bars were shown or hidden.
-(void)wmplayer:(WMPlayer *)wmplayer isHiddenTopAndBottomView:(BOOL )isHidden;
// Playback failed; `state` carries the failure state.
-(void)wmplayerFailedPlay:(WMPlayer *)wmplayer WMPlayerStatus:(WMPlayerState)state;
// The player item became ready to play.
-(void)wmplayerReadyToPlay:(WMPlayer *)wmplayer WMPlayerStatus:(WMPlayerState)state;
// The natural video size became available.
-(void)wmplayerGotVideoSize:(WMPlayer *)wmplayer videoSize:(CGSize )presentationSize;
// Playback finished.
-(void)wmplayerFinishedPlay:(WMPlayer *)wmplayer;
@end
// A drop-in video player view with built-in controls, gestures, and
// full-screen transition support.
@interface WMPlayer : UIView

// Superview the player returns to when leaving full screen
// (see Enter/ExitFullScreenTransition).
@property(nonatomic,strong)UIView *parentView;
// Frame before entering full screen — presumably restored on exit; confirm in WMPlayer.m.
@property(nonatomic,assign)CGRect originFrame;
// Small-screen frame converted to window coordinates (used by the exit transition).
@property(nonatomic,assign)CGRect oldFrameToWindow;
// Bounds captured before the full-screen transition (used by the exit transition).
@property(nonatomic,assign)CGRect beforeBounds;
// Center captured before the full-screen transition (used by the exit transition).
@property(nonatomic,assign)CGPoint beforeCenter;
// Current presentation state (small / full screen / animating).
@property (nonatomic, assign) WMPlayerViewState viewState;
/** The data model describing what to play. */
@property (nonatomic,strong) WMPlayerModel *playerModel;
/** Style of the back button (none / close "X" / pop arrow). */
@property (nonatomic, assign) BackBtnStyle backBtnStyle;
/** Whether the player is currently in full-screen mode. */
@property (nonatomic,assign) BOOL isFullscreen;
/** Playback rate (0.5, 1.0, 1.25, 1.5, 2.0). */
@property (nonatomic,assign) CGFloat rate;
/** Tint color applied to the player's controls. */
@property (nonatomic,strong) UIColor *tintColor;
// Whether the status bar should currently be hidden for this player.
@property (nonatomic,assign,readonly) BOOL prefersStatusBarHidden;
/** Receiver of player events and status changes. */
@property (nonatomic, weak)id <WMPlayerDelegate> delegate;
/** Enables the volume pan gesture. */
@property (nonatomic,assign) BOOL enableVolumeGesture;
/** Keep playing when the app enters the background. Default NO. */
@property (nonatomic,assign) BOOL enableBackgroundMode;
/** Enables AirPlay playback. Default NO. */
@property (nonatomic,assign) BOOL enableAirPlay;
/** Enables the fast-forward/rewind pan gesture. */
@property (nonatomic,assign) BOOL enableFastForwardGesture;
/** Mutes playback audio. */
@property (nonatomic,assign) BOOL muted;
/** Restart from the beginning when playback finishes. Default NO. */
@property (nonatomic,assign) BOOL loopPlay;
/**
 How video content is scaled inside the player layer.
 */
@property (nonatomic, assign) WMPlayerLayerGravity playerLayerGravity;
// Whether the screen-lock button is currently engaged.
@property (nonatomic,assign,readonly) BOOL isLockScreen;
/**
 Initializes a player with the given model.
 @param playerModel the model describing what to play
 @return a configured player instance
 */
-(instancetype)initPlayerModel:(WMPlayerModel *)playerModel;
/**
 Convenience factory equivalent of -initPlayerModel:.
 @param playerModel the model describing what to play
 @return a configured player instance
 */
+(instancetype)playerWithModel:(WMPlayerModel *)playerModel;
/**
 Starts or resumes playback.
 */
- (void)play;
/**
 Pauses playback.
 */
- (void)pause;
/**
 @return the current playback position (seconds, presumably — confirm in WMPlayer.m)
 */
- (double)currentTime;
/**
 @return the total duration of the current item (same unit as -currentTime)
 */
- (double)duration;
/**
 Action handler for the play/pause button.
 */
- (void)playOrPause:(UIButton *)sender;
/**
 Tears down the player's internal state so it can be reused or released.
 */
- (void )resetWMPlayer;
/**
 @return the WMPlayer library version string
 */
+(NSString *)version;
// Returns the affine transform matching the device's current orientation.
+(CGAffineTransform)getCurrentDeviceOrientation;
// YES on notched (iPhone X-style) devices.
+(BOOL)IsiPhoneX;
@end
``` | /content/code_sandbox/WMPlayer/WMPlayer.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,065 |
```objective-c
//
// LandscapeRightViewController.h
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import "FullScreenHelperViewController.h"
NS_ASSUME_NONNULL_BEGIN
// Full-screen helper presented when the player should rotate to landscape-right.
@interface LandscapeRightViewController : FullScreenHelperViewController
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WMPlayer/LandscapeRightViewController.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 59 |
```objective-c
//
// LandscapeLeftViewController.m
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import "LandscapeLeftViewController.h"
// Private class extension (no private API at present).
@interface LandscapeLeftViewController ()
@end
@implementation LandscapeLeftViewController

- (void)viewDidLoad {
    [super viewDidLoad];
}

// Present rotated to landscape-left (rotation policy lives in the superclass).
- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation {
    return UIInterfaceOrientationLandscapeLeft;
}

- (void)dealloc {
    NSLog(@"LandscapeLeftViewController dealloc");
}

@end
``` | /content/code_sandbox/WMPlayer/LandscapeLeftViewController.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 96 |
```objective-c
//
// WMPlayerModel.m
//
//
// Created by zhengwenming on 2018/4/26.
//
#import "WMPlayerModel.h"
@implementation WMPlayerModel

// Stores the video's natural presentation size and derives the portrait flag.
//
// Fixed: the flag was previously only ever set to YES, so a model reused for a
// landscape video after a portrait one kept a stale verticalVideo == YES. A
// direct width/height comparison both clears the flag for landscape sizes and
// avoids the NaN division for a degenerate (0, 0) size (same result for zero
// sizes as before: NO).
-(void)setPresentationSize:(CGSize)presentationSize{
    _presentationSize = presentationSize;
    // Portrait iff the natural size is taller than it is wide.
    self.verticalVideo = (presentationSize.width < presentationSize.height);
}

@end
``` | /content/code_sandbox/WMPlayer/WMPlayerModel.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 87 |
```objective-c
//
// WMLightView.h
// WMPlayer
//
// Created by on 16/10/26.
//
#import <UIKit/UIKit.h>
// Brightness HUD — naming suggests it is shown while the user adjusts screen
// brightness; confirm in WMLightView.m.
@interface WMLightView : UIView
// Backdrop container of the HUD.
@property (strong, nonatomic) UIView *lightBackView;
// Center brightness icon.
@property (strong, nonatomic) UIImageView *centerLightIV;
// Presumably the tick views representing the brightness level — confirm in WMLightView.m.
@property (strong, nonatomic) NSMutableArray * lightViewArr;
@end
``` | /content/code_sandbox/WMPlayer/WMLightView.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 84 |
```objective-c
//
// LandscapeLeftViewController.h
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import "FullScreenHelperViewController.h"
NS_ASSUME_NONNULL_BEGIN
// Full-screen helper presented when the player should rotate to landscape-left.
@interface LandscapeLeftViewController : FullScreenHelperViewController
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WMPlayer/LandscapeLeftViewController.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 59 |
```objective-c
//
// FastForwardView.m
// WMPlayer
//
// Created by on 16/10/26.
//
#import "FastForwardView.h"
#import "Masonry.h"
// HUD shown while seeking: a direction icon on top and a time label below,
// both centered horizontally, on a rounded semi-transparent black panel.
@implementation FastForwardView

- (instancetype)init{
    self = [super init];
    if (self) {
        self.backgroundColor = [UIColor colorWithRed:0.0f green:0.0f blue:0.0f alpha:0.8];
        self.layer.cornerRadius = 4;
        self.layer.masksToBounds = YES;

        // Direction/state icon, pinned to the top.
        UIImageView *directionIcon = [[UIImageView alloc] init];
        directionIcon.image = [UIImage imageNamed:@"progress_icon_r"];
        self.stateImageView = directionIcon;
        [self addSubview:directionIcon];
        [directionIcon mas_makeConstraints:^(MASConstraintMaker *make) {
            make.top.equalTo(self).offset(10);
            make.centerX.mas_equalTo(self);
        }];

        // Elapsed/target time, pinned to the bottom.
        UILabel *timeText = [[UILabel alloc] init];
        timeText.font = [UIFont systemFontOfSize:15.f];
        timeText.textAlignment = NSTextAlignmentCenter;
        timeText.textColor = [UIColor whiteColor];
        self.timeLabel = timeText;
        [self addSubview:timeText];
        [timeText mas_makeConstraints:^(MASConstraintMaker *make) {
            make.bottom.equalTo(self).offset(-10);
            make.centerX.mas_equalTo(self);
        }];
    }
    return self;
}

@end
``` | /content/code_sandbox/WMPlayer/FastForwardView.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 273 |
```objective-c
//
// LandscapeRightViewController.m
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import "LandscapeRightViewController.h"
// Private class extension (no private API at present).
@interface LandscapeRightViewController ()
@end
@implementation LandscapeRightViewController

- (void)viewDidLoad {
    [super viewDidLoad];
}

// Present rotated to landscape-right (rotation policy lives in the superclass).
- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation {
    return UIInterfaceOrientationLandscapeRight;
}

- (void)dealloc {
    NSLog(@"LandscapeRightViewController dealloc");
}

@end
``` | /content/code_sandbox/WMPlayer/LandscapeRightViewController.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 96 |
```objective-c
//
// EnterFullScreenTransition.h
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "WMPlayer.h"
NS_ASSUME_NONNULL_BEGIN
// Custom present-transition animator that moves a WMPlayer from its embedded
// frame into full screen.
@interface EnterFullScreenTransition : NSObject<UIViewControllerAnimatedTransitioning>
// The player whose view will be animated for the duration of the transition.
- (instancetype)initWithPlayer:(WMPlayer *)wmplayer;
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WMPlayer/EnterFullScreenTransition.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 84 |
```objective-c
//
// ExitFullScreenTransition.m
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import "ExitFullScreenTransition.h"
#import "Masonry.h"
// Private state: the player being animated back to its embedded position.
@interface ExitFullScreenTransition ()
@property(nonatomic,strong)WMPlayer *player;
@end
// Dismiss-transition animator: shrinks the full-screen player view back to the
// small-screen geometry captured before the transition, then re-parents the
// player into its original superview.
@implementation ExitFullScreenTransition

- (instancetype)initWithPlayer:(WMPlayer *)wmplayer{
    self = [super init];
    if (self) {
        self.player = wmplayer;
    }
    return self;
}

#pragma mark - UIViewControllerTransitioningDelegate

- (NSTimeInterval)transitionDuration:(nullable id <UIViewControllerContextTransitioning>)transitionContext{
    return 0.30;
}

- (void)animateTransition:(id <UIViewControllerContextTransitioning>)transitionContext{
    // Container view hosting both hierarchies during the animation.
    UIView *containerView = [transitionContext containerView];
    // Outgoing (full-screen) view; cleared so the destination shows through.
    UIView *fromView = [transitionContext viewForKey:UITransitionContextFromViewKey];
    fromView.backgroundColor = [UIColor clearColor];
    UIView *toView = [transitionContext viewForKey:UITransitionContextToViewKey];

    // Target center, converted from window coordinates into the container.
    CGPoint initialCenter = [containerView convertPoint:self.player.beforeCenter fromView:nil];
    [containerView insertSubview:toView belowSubview:fromView];

    if ([self.player.parentView isKindOfClass:[UIImageView class]]) {
        // Special case (presumably a cell/cover image host): keep the player on
        // the window during the animation, then re-parent — confirm intent.
        // NOTE(review): keyWindow is deprecated since iOS 13; fine for
        // single-window apps, revisit for scene-based ones.
        self.player.frame = CGRectMake(self.player.oldFrameToWindow.origin.x, self.player.oldFrameToWindow.origin.y, self.player.frame.size.width, self.player.frame.size.height);
        [[UIApplication sharedApplication].keyWindow addSubview:self.player];
        [UIView animateWithDuration:[self transitionDuration:transitionContext] delay:0 options:UIViewAnimationOptionLayoutSubviews animations:^{
            // Undo the full-screen rotation/scale and restore the captured geometry.
            fromView.transform = CGAffineTransformIdentity;
            fromView.center = initialCenter;
            fromView.bounds = self.player.beforeBounds;
        } completion:^(BOOL finished) {
            [self.player removeFromSuperview];
            self.player.frame = self.player.parentView.bounds;
            [self.player.parentView addSubview:self.player];
            [fromView removeFromSuperview];
            [transitionContext completeTransition:YES];
        }];
    }else{
        [UIView animateWithDuration:[self transitionDuration:transitionContext] delay:0 options:UIViewAnimationOptionLayoutSubviews animations:^{
            fromView.transform = CGAffineTransformIdentity;
            fromView.center = initialCenter;
            fromView.bounds = self.player.beforeBounds;
        } completion:^(BOOL finished) {
            // Re-parent the player into its original superview once the
            // animation lands on the small-screen geometry.
            [self.player.parentView addSubview:self.player];
            [fromView removeFromSuperview];
            [transitionContext completeTransition:YES];
        }];
    }
}

@end
``` | /content/code_sandbox/WMPlayer/ExitFullScreenTransition.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 510 |
```objective-c
//
// FullScreenHelperViewController.m
// PlayerDemo
//
// Created by apple on 2020/5/18.
//
#import "FullScreenHelperViewController.h"
// Private conformance: the helper acts as the player's delegate while full screen.
@interface FullScreenHelperViewController ()<WMPlayerDelegate>
@end
@implementation FullScreenHelperViewController
/// Host controller for full-screen (landscape) playback. It only allows
/// landscape autorotation and dismisses itself when the device is rotated
/// back to portrait.
-(BOOL)shouldAutorotate{
    return YES;
}
-(UIInterfaceOrientationMask)supportedInterfaceOrientations{
    return UIInterfaceOrientationMaskLandscape;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    self.wmPlayer.delegate = self;
    self.view.backgroundColor = [UIColor whiteColor];
    // Observe physical device rotation so we can leave full screen when the
    // user turns the device back to portrait.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(onDeviceOrientationChange:)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];
}
/// Close button: in full screen this dismisses the full-screen presentation;
/// otherwise it pops/dismisses the hosting controller itself.
-(void)wmplayer:(WMPlayer *)wmplayer clickedCloseButton:(UIButton *)closeBtn{
    if (wmplayer.isFullscreen) {
        [self exitFullScreen];
    }else{
        if (self.presentingViewController) {
            [self dismissViewControllerAnimated:YES completion:^{
            }];
        }else{
            [self.navigationController popViewControllerAnimated:YES];
        }
    }
}
/// Full-screen button is a no-op here — this controller is already the
/// full-screen presentation.
-(void)wmplayer:(WMPlayer *)wmplayer clickedFullScreenButton:(UIButton *)fullScreenBtn{
}
/// Dismisses the full-screen presentation and marks the player as small again.
-(void)exitFullScreen{
    self.wmPlayer.isFullscreen = NO;
    [self dismissViewControllerAnimated:YES completion:^{
        self.wmPlayer.viewState = PlayerViewStateSmall;
    }];
}
/// Device-rotation handler. Switches on UIDeviceOrientation directly.
/// NOTE: the previous implementation cast UIDeviceOrientation to
/// UIInterfaceOrientation — those enums swap their landscape-left/right raw
/// values, so the cast mislabels landscape. The log strings below preserve
/// the exact output the old code produced for each physical rotation.
- (void)onDeviceOrientationChange:(NSNotification *)notification{
    if (self.wmPlayer.isLockScreen){
        return;
    }
    if (self.wmPlayer.viewState!=PlayerViewStateFullScreen) {
        return;
    }
    UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
    switch (orientation) {
        case UIDeviceOrientationPortrait:{
            NSLog(@"0---");
            self.wmPlayer.viewState = PlayerViewStateAnimating;
            [self exitFullScreen];
        }
            break;
        case UIDeviceOrientationPortraitUpsideDown:{
            NSLog(@"3---");
        }
            break;
        case UIDeviceOrientationLandscapeLeft:{
            // Device landscape-left == interface landscape-right.
            NSLog(@"1---");
        }
            break;
        case UIDeviceOrientationLandscapeRight:{
            // Device landscape-right == interface landscape-left.
            NSLog(@"2---");
        }
            break;
        default:
            // FaceUp / FaceDown / Unknown: ignore.
            break;
    }
}
- (void)viewDidLayoutSubviews{
    [super viewDidLayoutSubviews];
}
- (void)dealloc{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    NSLog(@"FullScreenHelperViewController dealloc");
}
@end
``` | /content/code_sandbox/WMPlayer/FullScreenHelperViewController.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 521 |
```objective-c
//
// WMLightView.m
// WMPlayer
//
// Created by on 16/10/26.
//
#import "WMLightView.h"
#import "WMPlayer.h"
#define LIGHT_VIEW_COUNT 16
@interface WMLightView ()
@end
@implementation WMLightView
/// Brightness HUD: a rounded white panel with a centered brightness icon and
/// a 16-segment level bar. It observes [UIScreen brightness] via KVO and
/// shows/auto-hides itself when the brightness changes.
- (instancetype)init{
    self = [super init];
    if (self) {
        self.backgroundColor = [UIColor whiteColor];
        // Initial frame; -layoutSubviews re-centers the panel in its superview.
        self.frame = CGRectMake(([UIScreen mainScreen].bounds.size.width) * 0.5, ([UIScreen mainScreen].bounds.size.height) * 0.5, 155, 155);
        self.layer.cornerRadius = 10;
        {
            // Title slot (text currently empty).
            UILabel *titleLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 5, self.bounds.size.width, 30)];
            titleLabel.font = [UIFont boldSystemFontOfSize:16.0];
            titleLabel.textColor = [UIColor colorWithRed:0.25f green:0.22f blue:0.21f alpha:1.00f];
            titleLabel.textAlignment = NSTextAlignmentCenter;
            titleLabel.text = @"";
            [self addSubview:titleLabel];
        }
        {
            // Centered brightness icon.
            self.centerLightIV = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 79, 76)];
            self.centerLightIV.image = [UIImage imageNamed:[@"WMPlayer.bundle" stringByAppendingPathComponent:@"play_new_brightness_day"]];
            self.centerLightIV.center = CGPointMake(155 * 0.5, 155 * 0.5);
            [self addSubview:self.centerLightIV];
        }
        {
            // Dark track behind the level segments.
            self.lightBackView = [[UIView alloc]initWithFrame:CGRectMake(13, 132, self.bounds.size.width - 26, 7)];
            self.lightBackView.backgroundColor = [UIColor colorWithRed:65.0/255.0 green:67.0/255.0 blue:70.0/255.0 alpha:1.0];
            [self addSubview:self.lightBackView];
        }
        // One white segment per brightness step.
        self.lightViewArr = [NSMutableArray arrayWithCapacity:LIGHT_VIEW_COUNT];
        CGFloat segmentWidth = (self.lightBackView.bounds.size.width - 17) / LIGHT_VIEW_COUNT;
        CGFloat segmentHeight = 5;
        CGFloat segmentY = 1;
        for (int i = 0; i < LIGHT_VIEW_COUNT; i++) {
            CGFloat segmentX = i * (segmentWidth + 1) + 1;
            UIView *segment = [[UIView alloc] initWithFrame:CGRectMake(segmentX, segmentY, segmentWidth, segmentHeight)];
            segment.backgroundColor = [UIColor whiteColor];
            [self.lightViewArr addObject:segment];
            [self.lightBackView addSubview:segment];
        }
        [self updateLongView:[UIScreen mainScreen].brightness];
        // Hide immediately on rotation; the next brightness change re-shows us.
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(onOrientationDidChange:)
                                                     name:UIDeviceOrientationDidChangeNotification
                                                   object:nil];
        // KVO on screen brightness drives the HUD.
        [[UIScreen mainScreen] addObserver:self
                                forKeyPath:@"brightness"
                                   options:NSKeyValueObservingOptionNew context:NULL];
        self.alpha = 0.0;
    }
    return self;
}
/// KVO callback for [UIScreen brightness]: fades the HUD in, schedules an
/// auto-hide after one second, and refreshes the segment bar.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if (![keyPath isEqualToString:@"brightness"]) {
        // Defensive: only "brightness" is registered, forward anything else.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
        return;
    }
    // NSKeyValueChangeNewKey (== @"new") — use the named constant.
    CGFloat brightness = [change[NSKeyValueChangeNewKey] floatValue];
    if (self.alpha == 0.0) {
        self.alpha = 1.0;
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
            [self hideLightView];
        });
    }
    [self updateLongView:brightness];
}
- (void)onOrientationDidChange:(NSNotification *)notify {
    self.alpha = 0.0;
}
/// Fades the HUD out, but only if it is currently fully visible.
- (void)hideLightView{
    if (self.alpha == 1.0) {
        [UIView animateWithDuration:0.8 animations:^{
            self.alpha = 0.0;
        } completion:^(BOOL finished) {
        }];
    }
}
#pragma mark - Update View
/// Reveals one segment per 1/15 step of the 0..1 brightness value.
- (void)updateLongView:(CGFloat)brightness {
    CGFloat stage = 1 / 15.0;
    NSInteger level = brightness / stage;
    for (int i = 0; i < self.lightViewArr.count; i++) {
        UIView *aView = self.lightViewArr[i];
        aView.hidden = (i > level);
    }
    [self setNeedsLayout];
    [self.superview bringSubviewToFront:self];
}
- (void)layoutSubviews {
    [super layoutSubviews];
    // NOTE(review): this transform is immediately overwritten with the
    // identity below — it looks like dead code; confirm +getCurrentDeviceOrientation
    // has no side effects before removing it.
    self.transform = [WMPlayer getCurrentDeviceOrientation];
    self.transform = CGAffineTransformIdentity;
    self.center = self.superview.center;
}
- (void)dealloc {
    self.lightViewArr = nil;
    self.lightBackView = nil;
    // Balance the KVO registration and notification observer from -init.
    [[UIScreen mainScreen] removeObserver:self forKeyPath:@"brightness"];
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
@end
``` | /content/code_sandbox/WMPlayer/WMLightView.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,052 |
```objective-c
//
// WMPlayerModel.h
//
//
// Created by zhengwenming on 2018/4/26.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
/// Model describing a single video to be played by WMPlayer.
@interface WMPlayerModel : NSObject
// Title shown in the player's top bar.
@property (nonatomic, copy) NSString *title;
// Video URL — either a local file URL or a remote http(s) URL.
@property (nonatomic, strong) NSURL *videoURL;
// Pre-built AVPlayerItem; presumably used instead of videoURL when supplied — confirm in WMPlayer.m.
@property (nonatomic, strong) AVPlayerItem *playerItem;
// Position (seconds) to seek to when playback starts.
@property (nonatomic, assign) double seekTime;
// Index path of the hosting cell when the player lives in a list — TODO confirm against callers.
@property (nonatomic, strong) NSIndexPath *indexPath;
// Natural display size of the video — presumably AVPlayerItem.presentationSize; confirm.
@property (nonatomic,assign) CGSize presentationSize;
// YES when the video is portrait, i.e. width/height < 1.
@property (nonatomic,assign) BOOL verticalVideo;
@end
``` | /content/code_sandbox/WMPlayer/WMPlayerModel.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 159 |
```objective-c
//
// EnterFullScreenTransition.m
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import "EnterFullScreenTransition.h"
#import "Masonry.h"
@interface EnterFullScreenTransition ()
@property(nonatomic,strong)WMPlayer *wmplayer;
@end
@implementation EnterFullScreenTransition
/// Keeps a strong reference to the player view that will be re-parented
/// into the full-screen controller's view during the transition.
- (instancetype)initWithPlayer:(WMPlayer *)wmplayer{
    self = [super init];
    if (self) {
        self.wmplayer = wmplayer;
    }
    return self;
}
#pragma mark - UIViewControllerTransitioningDelegate
/// Fixed duration for the enter-full-screen animation.
- (NSTimeInterval)transitionDuration:(nullable id <UIViewControllerContextTransitioning>)transitionContext{
    return 0.30;
}
/// Animates the player from its inline position into full screen: the
/// destination view starts at the player's recorded center/bounds,
/// pre-rotated by ±90°, then animates to fill the transition container.
/// The statement order matters — the player is re-parented into toView
/// before toView's geometry is set.
- (void)animateTransition:(id <UIViewControllerContextTransitioning>)transitionContext{
    UIView *containerView = [transitionContext containerView];
    UIViewController *toViewController = [transitionContext viewControllerForKey:UITransitionContextToViewControllerKey];
//    UIViewController *fromViewController = [transitionContext viewControllerForKey:UITransitionContextFromViewControllerKey];
    UIView *toView = [transitionContext viewForKey:UITransitionContextToViewKey];
    toView.backgroundColor = [UIColor clearColor];
    // beforeCenter is expressed in the player's coordinate space; map it into the container.
    CGPoint initialCenter = [containerView convertPoint:self.wmplayer.beforeCenter fromView:self.wmplayer];
    [containerView addSubview:toView];
    [self.wmplayer removeFromSuperview];
    [toView addSubview:self.wmplayer];
    toView.bounds = self.wmplayer.beforeBounds;
    toView.center = initialCenter;
    // NOTE(review): stringly-typed class check — NSClassFromString breaks
    // silently if LandscapeLeftViewController is renamed. The rotation sign
    // depends on which landscape controller is being presented.
    if ([toViewController isKindOfClass:[NSClassFromString(@"LandscapeLeftViewController") class]]) {
        toView.transform = CGAffineTransformMakeRotation(M_PI_2);
    }else{
        toView.transform = CGAffineTransformMakeRotation(-M_PI_2);
    }
    [UIView animateWithDuration:[self transitionDuration:transitionContext] delay:0 options:UIViewAnimationOptionLayoutSubviews animations:^{
        toView.transform = CGAffineTransformIdentity;
        toView.bounds = containerView.bounds;
        toView.center = containerView.center;
    } completion:^(BOOL finished) {
        [transitionContext completeTransition:YES];
    }];
}
@end
``` | /content/code_sandbox/WMPlayer/EnterFullScreenTransition.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 423 |
```objective-c
//
// FullScreenHelperViewController.h
// PlayerDemo
//
// Created by apple on 2020/5/18.
//
#import <UIKit/UIKit.h>
#import "WMPlayer.h"
NS_ASSUME_NONNULL_BEGIN
/// Container view controller that hosts WMPlayer while it is presented full
/// screen in landscape; dismisses itself when the device returns to portrait.
@interface FullScreenHelperViewController : UIViewController
// The player view being shown full screen; assigned by the presenter.
@property(nonatomic,strong)WMPlayer *wmPlayer;
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WMPlayer/FullScreenHelperViewController.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 73 |
```objective-c
//
// ExitFullScreenTransition.h
// PlayerDemo
//
// Created by apple on 2020/5/20.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "WMPlayer.h"
NS_ASSUME_NONNULL_BEGIN
/// Animated transition that returns WMPlayer from full screen back to its
/// original inline position (counterpart of EnterFullScreenTransition).
@interface ExitFullScreenTransition : NSObject<UIViewControllerAnimatedTransitioning>
/// wmplayer is the player view being restored to its small frame.
- (instancetype)initWithPlayer:(WMPlayer *)wmplayer;
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WMPlayer/ExitFullScreenTransition.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 84 |
```objective-c
//
// FastForwardView.h
// WMPlayer
//
// Created by on 16/10/26.
//
#import <UIKit/UIKit.h>
/// Heads-up view shown while the user scrubs (fast forward / rewind).
@interface FastForwardView : UIView
// Icon indicating the current seek state/direction.
@property (strong, nonatomic) UIImageView *stateImageView;
// Shows the target playback time while scrubbing.
@property (strong, nonatomic) UILabel *timeLabel;
@end
``` | /content/code_sandbox/WMPlayer/FastForwardView.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 68 |
```unknown
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:PlayerDemo.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>
``` | /content/code_sandbox/PlayerDemo.xcworkspace/contents.xcworkspacedata | unknown | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 75 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
``` | /content/code_sandbox/PlayerDemo.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist | xml | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 72 |
```unknown
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "BFE6AA0C-AD50-4E24-86A5-80EE28066F74"
type = "0"
version = "2.0">
<Breakpoints>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "7FE92A26-C13D-4E5B-B9A7-1FB4A4DD848A"
shouldBeEnabled = "No"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "17"
endingLineNumber = "17"
landmarkName = "-shouldAutorotate"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "47C7346C-1D39-4A30-905E-E54956DD90D1"
shouldBeEnabled = "No"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "23"
endingLineNumber = "23"
landmarkName = "-supportedInterfaceOrientations"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "976D9863-293A-47CB-8EC4-EFB986E76956"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "26"
endingLineNumber = "26"
landmarkName = "-preferredInterfaceOrientationForPresentation"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "377EF2CF-E82D-4129-8B09-47F81E296FC6"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "61"
endingLineNumber = "61"
landmarkName = "-wmplayer:clickedCloseButton:"
landmarkType = "7">
<Locations>
<Location
uuid = "377EF2CF-E82D-4129-8B09-47F81E296FC6 - ddfdeb10138eece8"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "-[VCRotateViewController wmplayer:clickedCloseButton:]"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/kefu/Desktop/Github/WMPlayer/PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "61"
endingLineNumber = "61"
offsetFromSymbolStart = "144">
</Location>
<Location
uuid = "377EF2CF-E82D-4129-8B09-47F81E296FC6 - ca71ba606e0b59b7"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "__54-[VCRotateViewController wmplayer:clickedCloseButton:]_block_invoke"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/kefu/Desktop/Github/WMPlayer/PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "63"
endingLineNumber = "63"
offsetFromSymbolStart = "16">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "073DB488-3EC8-4CF5-8100-AD08E8BC010E"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "66"
endingLineNumber = "66"
landmarkName = "-wmplayer:clickedCloseButton:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "9595CEB6-44F9-418C-A8B6-DA13CCBD59C2"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "85"
endingLineNumber = "85"
landmarkName = "-changeInterfaceOrientation:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "E958A5B1-4B9F-4ACD-91F5-A78E9740404F"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "103"
endingLineNumber = "103"
landmarkName = "-changeInterfaceOrientation:"
landmarkType = "7">
<Locations>
<Location
uuid = "E958A5B1-4B9F-4ACD-91F5-A78E9740404F - cad6f3539c7f71e4"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "-[VCRotateViewController changeInterfaceOrientation:]"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/kefu/Desktop/Github/WMPlayer/PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "103"
endingLineNumber = "103"
offsetFromSymbolStart = "876">
</Location>
<Location
uuid = "E958A5B1-4B9F-4ACD-91F5-A78E9740404F - cad6f3539c7f71e4"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "-[VCRotateViewController changeInterfaceOrientation:]"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/kefu/Desktop/Github/WMPlayer/PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "103"
endingLineNumber = "103"
offsetFromSymbolStart = "928">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "EE423066-1B76-499A-896D-9E67D94B0B0D"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "136"
endingLineNumber = "136"
landmarkName = "-wmplayer:clickedFullScreenButton:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "1F0F6484-6FF7-415D-B259-5BC97E36116D"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "138"
endingLineNumber = "138"
landmarkName = "-wmplayer:clickedFullScreenButton:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "7703EDD2-9B4E-4DB9-A434-24DA709EBEED"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "152"
endingLineNumber = "152"
landmarkName = "-onDeviceOrientationChange:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "987A2A4A-6366-429E-B48B-62894968BCB2"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "156"
endingLineNumber = "156"
landmarkName = "-onDeviceOrientationChange:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "8217645A-8580-45D2-84A5-DAF2C88E991B"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "161"
endingLineNumber = "161"
landmarkName = "-onDeviceOrientationChange:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "ADC2C375-73E4-471E-B0E5-14DA0A7877AF"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "166"
endingLineNumber = "166"
landmarkName = "-onDeviceOrientationChange:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "BA0E43E2-13AC-4313-B6D0-53EDFB6218B9"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "146"
endingLineNumber = "146"
landmarkName = "-onDeviceOrientationChange:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "CE894D9B-FDB4-4CFB-9626-529CF515C10F"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "57"
endingLineNumber = "57"
landmarkName = "-wmplayer:clickedCloseButton:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "261C5D76-46FB-4986-8610-836005A7225C"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "PlayerDemo/ViewControllers/VCRotateViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "82"
endingLineNumber = "82"
landmarkName = "-changeInterfaceOrientation:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
</Breakpoints>
</Bucket>
``` | /content/code_sandbox/PlayerDemo.xcworkspace/xcuserdata/kefu.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist | unknown | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 3,471 |
```unknown
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "88098F99-9FDE-4588-8933-92296F45210B"
type = "0"
version = "2.0">
<Breakpoints>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "C597733B-162E-4CBE-BFE8-CFB19362DFFA"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/RotateAnimator.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "90"
endingLineNumber = "90"
landmarkName = "unknown"
landmarkType = "0">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "44E3E332-D4E9-43F5-BBB4-A2360ABCFA14"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/RotateAnimator.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "93"
endingLineNumber = "93"
landmarkName = "unknown"
landmarkType = "0">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.ExceptionBreakpoint">
<BreakpointContent
uuid = "902136FB-6B9E-492D-B2A1-3250C51600C6"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
breakpointStackSelectionBehavior = "1"
scope = "1"
stopOnStyle = "0">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "0D77D3D7-715F-47D6-9432-95AC216A5866"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/WMPlayer.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "1350"
endingLineNumber = "1350"
landmarkName = "-resetWMPlayer"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
</Breakpoints>
</Bucket>
``` | /content/code_sandbox/PlayerDemo.xcworkspace/xcuserdata/apple.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist | unknown | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 661 |
```unknown
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "800E5736-FBAD-41E0-86CF-58096932A072"
type = "0"
version = "2.0">
<Breakpoints>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "BB426EE7-48AD-4974-8E03-E3AFB939DDEF"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/ExitFullScreenTransition.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "56"
endingLineNumber = "56"
landmarkName = "-animateTransition:"
landmarkType = "7">
<Locations>
<Location
uuid = "BB426EE7-48AD-4974-8E03-E3AFB939DDEF - 8cb59e1bdfd710ac"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "-[ExitFullScreenTransition animateTransition:]"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/apple/Desktop/Github/WMPlayer/WMPlayer/ExitFullScreenTransition.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "57"
endingLineNumber = "57"
offsetFromSymbolStart = "1104">
</Location>
<Location
uuid = "BB426EE7-48AD-4974-8E03-E3AFB939DDEF - e9abe63e6a5139b"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "__46-[ExitFullScreenTransition animateTransition:]_block_invoke.58"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/apple/Desktop/Github/WMPlayer/WMPlayer/ExitFullScreenTransition.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "58"
endingLineNumber = "58"
offsetFromSymbolStart = "32">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "380C78DF-7554-49DF-8092-0109CAA6432C"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/ExitFullScreenTransition.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "44"
endingLineNumber = "44"
landmarkName = "-animateTransition:"
landmarkType = "7">
<Locations>
<Location
uuid = "380C78DF-7554-49DF-8092-0109CAA6432C - 8cb59e1bdfd71262"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "-[ExitFullScreenTransition animateTransition:]"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/apple/Desktop/Github/WMPlayer/WMPlayer/ExitFullScreenTransition.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "43"
endingLineNumber = "43"
offsetFromSymbolStart = "732">
</Location>
<Location
uuid = "380C78DF-7554-49DF-8092-0109CAA6432C - 24479e2126b1c193"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "__46-[ExitFullScreenTransition animateTransition:]_block_invoke"
moduleName = "PlayerDemo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/apple/Desktop/Github/WMPlayer/WMPlayer/ExitFullScreenTransition.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "44"
endingLineNumber = "44"
offsetFromSymbolStart = "40">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "BEBD9686-9261-416D-8A66-641382D6AA4A"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/FullScreenHelperViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "75"
endingLineNumber = "75"
landmarkName = "-onDeviceOrientationChange:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "C6BEEEEB-DB17-479A-9674-B3937986215D"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/LandscapeLeftViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "23"
endingLineNumber = "23"
landmarkName = "-dealloc"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "FDA78DA6-EE38-47D3-81C0-9B9B05F26063"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/FullScreenHelperViewController.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "61"
endingLineNumber = "61"
landmarkName = "-onDeviceOrientationChange:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "15E5F90A-EBE2-4F09-88BE-01926376CA98"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "WMPlayer/ExitFullScreenTransition.m"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "39"
endingLineNumber = "39"
landmarkName = "-animateTransition:"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
</Breakpoints>
</Bucket>
``` | /content/code_sandbox/PlayerDemo.xcworkspace/xcuserdata/zhengwenming.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist | unknown | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,738 |
```objective-c
/*!
 @header WMPlayer.m
 @abstract GitHub: https://github.com/zhengwenming/WMPlayer
 @author Created by zhengwenming on 16/1/24
 @version 2.0.0 16/1/24 Creation
 */
#import "WMPlayer.h"
//*************************************************************
#define WMPlayerSrcName(file) [@"WMPlayer.bundle" stringByAppendingPathComponent:file]
#define WMPlayerFrameworkSrcName(file) [@"Frameworks/WMPlayer.framework/WMPlayer.bundle" stringByAppendingPathComponent:file]
#define WMPlayerImage(file) [UIImage imageNamed:WMPlayerSrcName(file)] ? :[UIImage imageNamed:WMPlayerFrameworkSrcName(file)]
//
#define TotalScreenTime 90
#define LeastDistance 15
static void *PlayViewCMTimeValue = &PlayViewCMTimeValue;
static void *PlayViewStatusObservationContext = &PlayViewStatusObservationContext;
// Private state of WMPlayer (class extension).
@interface WMPlayer () <UIGestureRecognizerDelegate,AVRoutePickerViewDelegate,AVPictureInPictureControllerDelegate>
// Picture-in-Picture controller.
@property(nonatomic, strong)AVPictureInPictureController *AVPictureInPictureController;
// Top and bottom control bars (image views used as containers).
@property (nonatomic,retain) UIImageView *topView,*bottomView;
// Whether the underlying AVPlayer has been set up — TODO confirm where this is toggled.
@property (nonatomic,assign) BOOL isInitPlayer;
// Whether the current pan gesture has actually moved — presumably used to distinguish taps; confirm.
@property (nonatomic,assign) BOOL hasMoved;
// Total duration of the current item (seconds) — confirm unit against usage.
@property (nonatomic,assign)CGFloat totalTime;
// Playback progress captured when a horizontal pan begins.
@property (nonatomic,assign)CGFloat touchBeginValue;
// Screen brightness captured when a vertical pan begins.
@property (nonatomic,assign)CGFloat touchBeginLightValue;
// System volume captured when a vertical pan begins.
@property (nonatomic,assign) CGFloat touchBeginVoiceValue;
// Point at which the current touch began.
@property (nonatomic,assign) CGPoint touchBeginPoint;
// What the current pan adjusts (progress / brightness / volume).
@property (nonatomic,assign) WMControlType controlType;
// Formatter used to render playback times.
@property (nonatomic,strong) NSDateFormatter *dateFormatter;
// Token returned by the periodic time observer (must be removed on teardown).
@property (nonatomic,strong) id playbackTimeObserver;
// Tap on the progress bar and single tap on the player surface.
@property (nonatomic,strong) UITapGestureRecognizer *progressTap,*singleTap;
// Slider drag state — original note garbled ("0120"); presumably 0 = idle and 1/2 = drag phases; confirm against the slider handlers.
@property (nonatomic,assign) NSInteger dragingSliderStatus;
// Whether the top/bottom control bars are currently hidden.
@property (nonatomic,assign) BOOL isHiddenTopAndBottomView;
// Whether the status bar is currently hidden.
@property (nonatomic,assign) BOOL hiddenStatusBar;
// YES when playback was paused by the system (e.g. interruption), not the user.
@property (nonatomic,assign) BOOL isPauseBySystem;
// Current playback state.
@property (nonatomic,assign) WMPlayerState state;
// Container for all of the player's subviews.
@property (nonatomic,strong) UIView *contentView;
// Brightness HUD.
@property (nonatomic,strong) WMLightView * lightView;
// Fast-forward / rewind HUD.
@property (nonatomic,strong) FastForwardView * FF_View;
// Elapsed time, remaining/total time, title, and load-failure labels.
@property (nonatomic,strong) UILabel *leftTimeLabel,*rightTimeLabel,*titleLabel,*loadFailedLabel;
// Control buttons: full screen, play/pause, lock, PiP, back, playback rate.
@property (nonatomic,strong) UIButton *fullScreenBtn,*playOrPauseBtn,*lockBtn,*pipBtn,*backBtn,*rateBtn;
// Seek slider and system volume slider.
@property (nonatomic,strong) UISlider *progressSlider,*volumeSlider;
// Buffer-progress bar and the thin bottom progress bar.
@property (nonatomic,strong) UIProgressView *loadingProgress,*bottomProgress;
// Spinner shown while buffering.
@property (nonatomic,strong) UIActivityIndicatorView *loadingView;
// The AVPlayerItem currently loaded.
@property (nonatomic,retain) AVPlayerItem *currentItem;
// Layer rendering the video; its frame must track the player view's bounds.
@property (nonatomic,retain) AVPlayerLayer *playerLayer;
// The underlying AVPlayer.
@property (nonatomic,retain) AVPlayer *player;
// URL of the current video.
@property (nonatomic,strong) NSURL *videoURL;
// Asset built from videoURL.
@property (nonatomic,strong) AVURLAsset *urlAsset;
// Position (seconds) to seek to once the item is ready.
@property (nonatomic,assign) double seekTime;
// AVLayerVideoGravity value applied to playerLayer.
@property (nonatomic, copy) NSString *videoGravity;
// AirPlay route picker container view.
@property (nonatomic,strong) UIView *airPlayView;
@end
@implementation WMPlayer
/// Storyboard/XIB initialization path; runs the common control-UI setup.
- (instancetype)initWithCoder:(NSCoder *)coder{
    if ((self = [super initWithCoder:coder])) {
        [self initWMPlayer];
    }
    return self;
}
/// Programmatic initialization path; runs the common control-UI setup.
-(instancetype)initWithFrame:(CGRect)frame{
    if ((self = [super initWithFrame:frame])) {
        [self initWMPlayer];
    }
    return self;
}
/// Creates a player pre-configured with a model (title / URL / item / seek time).
-(instancetype)initPlayerModel:(WMPlayerModel *)playerModel{
    if ((self = [super init])) {
        self.playerModel = playerModel;
    }
    return self;
}
/// Convenience factory wrapping initPlayerModel:.
+(instancetype)playerWithModel:(WMPlayerModel *)playerModel{
    return [[WMPlayer alloc] initPlayerModel:playerModel];
}
/// Lazily builds the formatter used for time labels. GMT keeps a zero
/// interval formatting as 00:00 rather than the local-zone offset.
- (NSDateFormatter *)dateFormatter {
    if (_dateFormatter == nil) {
        NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
        formatter.timeZone = [NSTimeZone timeZoneWithName:@"GMT"];
        _dateFormatter = formatter;
    }
    return _dateFormatter;
}
/// Lazily defaults the layer gravity to aspect-fit.
- (NSString *)videoGravity {
    if (_videoGravity == nil) {
        _videoGravity = AVLayerVideoGravityResizeAspect;
    }
    return _videoGravity;
}
/// Remembers the last non-fullscreen frame so exiting fullscreen can restore it.
- (void)setFrame:(CGRect)frame{
    [super setFrame:frame];
    if (self.isFullscreen) {
        return;
    }
    self.originFrame = frame;
}
/// One-time construction of the entire control UI: audio session, content
/// view, overlays, top/bottom bars, buttons, sliders, labels, AirPlay route
/// picker and tap gestures. Called from both init paths before playback.
-(void)initWMPlayer{
    // Keep the screen awake while the player view exists.
    [UIApplication sharedApplication].idleTimerDisabled=YES;
    NSError *setCategoryErr = nil;
    NSError *activationErr = nil;
    // NOTE(review): both NSError out-params are ignored — confirm best-effort is intended.
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error: &setCategoryErr];
    [[AVAudioSession sharedInstance]setActive: YES error: &activationErr];
    // Container hosting every control subview (and later the player layer).
    self.contentView = [UIView new];
    self.contentView.backgroundColor = [UIColor blackColor];
    [self addSubview:self.contentView];
    self.backgroundColor = [UIColor blackColor];
    // Fast-forward/rewind overlay, hidden until a horizontal pan begins.
    self.FF_View = [[FastForwardView alloc] init];
    self.FF_View.hidden = YES;
    [self.contentView addSubview:self.FF_View];
    self.lightView =[[WMLightView alloc] init];
    [self.contentView addSubview:self.lightView];
    // Volume and fast-forward pan gestures enabled by default.
    self.enableVolumeGesture = YES;
    self.enableFastForwardGesture = YES;
    // Buffering spinner, animating until the first ready state.
    self.loadingView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhite];
    [self.contentView addSubview:self.loadingView];
    [self.loadingView startAnimating];
    // Top bar (gradient shadow image) hosting back button, title, AirPlay.
    self.topView = [[UIImageView alloc]initWithImage:WMPlayerImage(@"top_shadow")];
    self.topView.userInteractionEnabled = YES;
    [self.contentView addSubview:self.topView];
    // Bottom bar hosting play/pause, slider, time labels, fullscreen, rate.
    self.bottomView = [[UIImageView alloc]initWithImage:WMPlayerImage(@"bottom_shadow")];
    self.bottomView.userInteractionEnabled = YES;
    [self.contentView addSubview:self.bottomView];
    // Play/pause toggle button.
    self.playOrPauseBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    self.playOrPauseBtn.showsTouchWhenHighlighted = YES;
    [self.playOrPauseBtn addTarget:self action:@selector(playOrPause:) forControlEvents:UIControlEventTouchUpInside];
    [self.playOrPauseBtn setImage:WMPlayerImage(@"player_ctrl_icon_pause") forState:UIControlStateNormal];
    [self.playOrPauseBtn setImage:WMPlayerImage(@"player_ctrl_icon_play") forState:UIControlStateSelected];
    [self.bottomView addSubview:self.playOrPauseBtn];
    self.playOrPauseBtn.selected = YES;// starts showing the "play" icon (not yet playing)
    MPVolumeView *volumeView = [[MPVolumeView alloc]init];
    for (UIControl *view in volumeView.subviews) {
        // NOTE(review): checks view.superclass rather than the usual isKindOfClass:
        // on the view itself — confirm this still finds the system volume slider.
        if ([view.superclass isSubclassOfClass:[UISlider class]]) {
            self.volumeSlider = (UISlider *)view;
        }
    }
    self.loadingProgress = [[UIProgressView alloc] initWithProgressViewStyle:UIProgressViewStyleDefault];
    self.loadingProgress.progressTintColor = [UIColor colorWithRed:1 green:1 blue:1 alpha:0.5];
    self.loadingProgress.trackTintColor = [UIColor clearColor];
    [self.bottomView addSubview:self.loadingProgress];
    [self.loadingProgress setProgress:0.0 animated:NO];
    [self.bottomView sendSubviewToBack:self.loadingProgress];
    // Seek slider, layered above the buffer progress view.
    self.progressSlider = [UISlider new];
    self.progressSlider.minimumValue = 0.0;
    self.progressSlider.maximumValue = 1.0;
    [self.progressSlider setThumbImage:WMPlayerImage(@"dot") forState:UIControlStateNormal];
    self.progressSlider.minimumTrackTintColor = self.tintColor?self.tintColor:[UIColor greenColor];
    self.progressSlider.maximumTrackTintColor = [UIColor colorWithRed:0.5 green:0.5 blue:0.5 alpha:0.5];
    self.progressSlider.backgroundColor = [UIColor clearColor];
    self.progressSlider.value = 0.0;// start at the beginning
    // Drag begins/changes.
    [self.progressSlider addTarget:self action:@selector(stratDragSlide:) forControlEvents:UIControlEventValueChanged];
    // Drag ends (inside or outside the slider) — commits the seek.
    [self.progressSlider addTarget:self action:@selector(updateProgress:) forControlEvents:UIControlEventTouchUpInside | UIControlEventTouchUpOutside];
    // Tap anywhere on the slider track to jump there.
    self.progressTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(actionTapGesture:)];
    self.progressTap.delegate = self;
    [self.progressSlider addGestureRecognizer:self.progressTap];
    [self.bottomView addSubview:self.progressSlider];
    self.bottomProgress = [[UIProgressView alloc] initWithProgressViewStyle:UIProgressViewStyleDefault];
    self.bottomProgress.trackTintColor = [UIColor colorWithRed:1 green:1 blue:1 alpha:0.5];
    self.bottomProgress.progressTintColor = self.tintColor?self.tintColor:[UIColor greenColor];
    self.bottomProgress.alpha = 0;
    [self.contentView addSubview:self.bottomProgress];
    // Fullscreen toggle button.
    self.fullScreenBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    self.fullScreenBtn.showsTouchWhenHighlighted = YES;
    [self.fullScreenBtn addTarget:self action:@selector(fullScreenAction:) forControlEvents:UIControlEventTouchUpInside];
    [self.fullScreenBtn setImage:WMPlayerImage(@"player_icon_fullscreen") forState:UIControlStateNormal];
    [self.fullScreenBtn setImage:WMPlayerImage(@"player_icon_fullscreen") forState:UIControlStateSelected];
    [self.bottomView addSubview:self.fullScreenBtn];
    // Screen-lock button (fullscreen only; see setIsFullscreen:).
    self.lockBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    self.lockBtn.showsTouchWhenHighlighted = YES;
    [self.lockBtn addTarget:self action:@selector(lockAction:) forControlEvents:UIControlEventTouchUpInside];
    [self.lockBtn setImage:WMPlayerImage(@"player_icon_unlock") forState:UIControlStateNormal];
    [self.lockBtn setImage:WMPlayerImage(@"player_icon_lock") forState:UIControlStateSelected];
    self.lockBtn.hidden = YES;
    [self.contentView addSubview:self.lockBtn];
    // Picture-in-Picture toggle button.
    self.pipBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    self.pipBtn.showsTouchWhenHighlighted = YES;
    [self.pipBtn addTarget:self action:@selector(pipAction:) forControlEvents:UIControlEventTouchUpInside];
    [self.pipBtn setImage:WMPlayerImage(@"pip.jpg") forState:UIControlStateNormal];
    [self.pipBtn setImage:WMPlayerImage(@"pip.jpg") forState:UIControlStateSelected];
    self.pipBtn.hidden = NO;
    [self.contentView addSubview:self.pipBtn];
    // Elapsed-time label.
    self.leftTimeLabel = [UILabel new];
    self.leftTimeLabel.textAlignment = NSTextAlignmentLeft;
    self.leftTimeLabel.textColor = [UIColor whiteColor];
    self.leftTimeLabel.font = [UIFont systemFontOfSize:11];
    [self.bottomView addSubview:self.leftTimeLabel];
    self.leftTimeLabel.text = [self convertTime:0.0];// show 00:00 initially
    // Total-duration label.
    self.rightTimeLabel = [UILabel new];
    self.rightTimeLabel.textAlignment = NSTextAlignmentRight;
    self.rightTimeLabel.textColor = [UIColor whiteColor];
    self.rightTimeLabel.font = [UIFont systemFontOfSize:11];
    [self.bottomView addSubview:self.rightTimeLabel];
    // Back/close button in the top bar.
    self.backBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    self.backBtn.showsTouchWhenHighlighted = YES;
    [self.backBtn setImage:WMPlayerImage(@"player_icon_nav_back.png") forState:UIControlStateNormal];
    [self.backBtn setImage:WMPlayerImage(@"player_icon_nav_back.png") forState:UIControlStateSelected];
    [self.backBtn addTarget:self action:@selector(colseTheVideo:) forControlEvents:UIControlEventTouchUpInside];
    [self.topView addSubview:self.backBtn];
    // Playback-rate button; empty title means the default 1x rate.
    self.rateBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [self.rateBtn addTarget:self action:@selector(switchRate:) forControlEvents:UIControlEventTouchUpInside];
    [self.rateBtn setTitle:@"" forState:UIControlStateNormal];
    [self.rateBtn setTitle:@"" forState:UIControlStateSelected];
    self.rateBtn.titleLabel.font = [UIFont systemFontOfSize:15.f];
    self.rateBtn.titleLabel.textAlignment = NSTextAlignmentRight;
    [self.bottomView addSubview:self.rateBtn];
    self.rateBtn.hidden = YES;
    self.rate = 1.0;// default playback rate
    if (@available(iOS 11.0, *)) {
        AVRoutePickerView *airPlayView = [[AVRoutePickerView alloc]initWithFrame:CGRectMake(0, 0, 35, 35)];
        // Tint while an AirPlay route is active.
        airPlayView.activeTintColor = [UIColor whiteColor];
        // Receive route-presentation callbacks.
        airPlayView.delegate = self;
        [self.topView addSubview:airPlayView];
        self.airPlayView = airPlayView;
    } else {
        MPVolumeView *airplay = [[MPVolumeView alloc] initWithFrame:CGRectMake(0, 0, 35, 35)];
        airplay.showsVolumeSlider = NO;
        airplay.backgroundColor = [UIColor whiteColor];
        [self.topView addSubview:airplay];
        self.airPlayView = airplay;
    }
    self.enableAirPlay = NO;
    // Video title in the top bar.
    self.titleLabel = [UILabel new];
    self.titleLabel.textColor = [UIColor whiteColor];
    self.titleLabel.font = [UIFont systemFontOfSize:15.0];
    [self.topView addSubview:self.titleLabel];
    // Centered label shown when loading fails.
    self.loadFailedLabel = [UILabel new];
    self.loadFailedLabel.textColor = [UIColor lightGrayColor];
    self.loadFailedLabel.textAlignment = NSTextAlignmentCenter;
    self.loadFailedLabel.text = @"";
    self.loadFailedLabel.hidden = YES;
    [self.contentView addSubview:self.loadFailedLabel];
    [self.loadFailedLabel sizeToFit];
    // Single-tap recognizer: toggles the control bars.
    self.singleTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleSingleTap:)];
    self.singleTap.numberOfTapsRequired = 1; // one tap
    self.singleTap.numberOfTouchesRequired = 1;
    self.singleTap.delegate = self;
    [self.contentView addGestureRecognizer:self.singleTap];
    // Double-tap recognizer: forwarded to the delegate.
    UITapGestureRecognizer* doubleTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleDoubleTap:)];
    doubleTap.numberOfTouchesRequired = 1; // one finger
    doubleTap.numberOfTapsRequired = 2; // two taps
    doubleTap.delegate = self;
    // Delay touch delivery so single vs double tap can be disambiguated.
    [self.singleTap setDelaysTouchesBegan:YES];
    [doubleTap setDelaysTouchesBegan:YES];
    [self.singleTap requireGestureRecognizerToFail:doubleTap];// single tap fires only if no double tap follows
    [self.contentView addGestureRecognizer:doubleTap];
}
#pragma mark - Gesture Delegate
/// Ignore touches that land on controls (buttons, sliders) so taps on them
/// are not swallowed by the tap recognizers.
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldReceiveTouch:(UITouch *)touch {
    return ![touch.view isKindOfClass:[UIControl class]];
}
/// Applies a playback rate to the AVPlayer (which also resumes playback),
/// flips the play/pause button to "playing", and updates the rate button's
/// title: empty for 1x, "%.2fX" for 1.25x, "%.1fX" otherwise.
-(void)setRate:(CGFloat)rate{
    _rate = rate;
    self.player.rate = rate;
    self.state = WMPlayerStatePlaying;
    self.playOrPauseBtn.selected = NO;
    NSString *title;
    if (rate == 1.0) {
        title = @"";
    } else if (rate == 1.25) {
        title = [NSString stringWithFormat:@"%.2fX",rate];
    } else {
        title = [NSString stringWithFormat:@"%.1fX",rate];
    }
    [self.rateBtn setTitle:title forState:UIControlStateNormal];
    [self.rateBtn setTitle:title forState:UIControlStateSelected];
}
//
/// Cycles the playback rate: 0.5 → 1.0 → 1.25 → 1.5 → 2.0 → 0.5.
/// The current rate is parsed from the button title ("1.5X" → 1.5);
/// an empty title means the default 1x.
-(void)switchRate:(UIButton *)rateBtn{
    NSString *currentTitle = rateBtn.currentTitle;
    CGFloat current = [currentTitle isEqualToString:@""] ? 1.0f : [currentTitle floatValue];
    CGFloat next = current;
    if (current == 0.5) {
        next = 1.0;
    } else if (current == 1.0) {
        next = 1.25;
    } else if (current == 1.25) {
        next = 1.5;
    } else if (current == 1.5) {
        next = 2.0;
    } else if (current == 2) {
        next = 0.5;
    }
    self.rate = next;
}
#pragma mark
#pragma mark - layoutSubviews
/// Lays out every control manually. Fullscreen and inline use different
/// bottom-bar geometries; iPhone-X-class devices get wider side margins
/// (60pt) to clear the notch/home indicator.
-(void)layoutSubviews{
    [super layoutSubviews];
    self.contentView.frame = self.bounds;
    self.playerLayer.frame = self.contentView.bounds;
    CGFloat iphoneX_margin = [WMPlayer IsiPhoneX]?60:20;
    self.FF_View.frame = CGRectMake(0, 0, 120, 70);
    self.FF_View.center = self.contentView.center;
    self.loadingView.center = self.contentView.center;
    self.topView.frame = CGRectMake(0, 0, self.contentView.frame.size.width, 70);
    self.backBtn.frame = CGRectMake(self.isFullscreen?([WMPlayer IsiPhoneX]?60:30):10, self.topView.frame.size.height/2-(self.backBtn.currentImage.size.height+4)/2, self.backBtn.currentImage.size.width+6, self.backBtn.currentImage.size.height+4);
    self.titleLabel.frame = CGRectMake(CGRectGetMaxX(self.backBtn.frame)+5, 0, self.topView.frame.size.width-CGRectGetMaxX(self.backBtn.frame)-20-50, self.topView.frame.size.height);
    if (self.isFullscreen) {
        // Fullscreen: taller bottom bar, slider above the button row,
        // rate button pinned to the trailing edge.
        self.bottomView.frame = CGRectMake(self.topView.frame.origin.x, self.contentView.frame.size.height-105, self.topView.frame.size.width, 105);
        self.progressSlider.frame = CGRectMake(iphoneX_margin, 0, self.bottomView.frame.size.width-iphoneX_margin*2, 30);
        self.loadingProgress.frame = CGRectMake(iphoneX_margin+2, CGRectGetMaxY(self.progressSlider.frame)-30/2-2, self.bottomView.frame.size.width-iphoneX_margin*2-2, 1);
        self.playOrPauseBtn.frame = CGRectMake(iphoneX_margin, CGRectGetMaxY(self.progressSlider.frame)+15, self.playOrPauseBtn.currentImage.size.width, self.playOrPauseBtn.currentImage.size.height);
        self.leftTimeLabel.frame = CGRectMake(CGRectGetMaxX(self.playOrPauseBtn.frame)+10, CGRectGetMaxY(self.playOrPauseBtn.frame)-self.playOrPauseBtn.frame.size.height/2-20/2, 100, 20);
        self.rightTimeLabel.frame = CGRectMake(CGRectGetMaxX(self.leftTimeLabel.frame)+1, self.leftTimeLabel.frame.origin.y, self.leftTimeLabel.frame.size.width, self.leftTimeLabel.frame.size.height);
        self.rateBtn.frame = CGRectMake(self.bottomView.frame.size.width-iphoneX_margin-45, self.playOrPauseBtn.frame.origin.y, 45, 30);
    }else{
        // Inline: compact bar with slider and time labels on one row.
        self.bottomView.frame = CGRectMake(self.topView.frame.origin.x, self.contentView.frame.size.height-70, self.topView.frame.size.width, 70);
        self.playOrPauseBtn.frame = CGRectMake(10, self.bottomView.frame.size.height/2-self.playOrPauseBtn.currentImage.size.height/2, self.playOrPauseBtn.currentImage.size.width, self.playOrPauseBtn.currentImage.size.height);
        self.leftTimeLabel.frame = CGRectMake(CGRectGetMaxX(self.playOrPauseBtn.frame)+5, self.bottomView.frame.size.height/2+8, 100, 20);
        self.rightTimeLabel.frame = CGRectMake(self.bottomView.frame.size.width-self.leftTimeLabel.frame.origin.x-self.leftTimeLabel.frame.size.width, self.bottomView.frame.size.height/2+8, self.leftTimeLabel.frame.size.width, self.leftTimeLabel.frame.size.height);
        self.loadingProgress.frame = CGRectMake(self.leftTimeLabel.frame.origin.x, self.bottomView.frame.size.height/2-2, self.bottomView.frame.size.width-(self.leftTimeLabel.frame.origin.x)*2, 1);
        self.progressSlider.frame = CGRectMake(self.leftTimeLabel.frame.origin.x-3, self.bottomView.frame.size.height/2-30/2, self.bottomView.frame.size.width-(self.leftTimeLabel.frame.origin.x)*2+6, 30);
        self.rateBtn.frame = CGRectMake(self.bottomView.frame.size.width-self.playOrPauseBtn.frame.origin.x, self.playOrPauseBtn.frame.origin.y, 45, 30);
    }
    self.lockBtn.frame = CGRectMake(iphoneX_margin, self.contentView.frame.size.height/2-self.lockBtn.frame.size.height/2, self.lockBtn.currentImage.size.width, self.lockBtn.currentImage.size.height);
    self.pipBtn.frame = CGRectMake(self.contentView.frame.size.width-40, self.contentView.frame.size.height/2-self.lockBtn.frame.size.height/2, self.lockBtn.currentImage.size.width, self.lockBtn.currentImage.size.height);
    self.fullScreenBtn.frame = CGRectMake(self.bottomView.frame.size.width-10-self.fullScreenBtn.currentImage.size.width, self.playOrPauseBtn.frame.origin.y, self.fullScreenBtn.currentImage.size.width, self.fullScreenBtn.currentImage.size.height);
    self.bottomProgress.frame = CGRectMake(iphoneX_margin, self.contentView.frame.size.height-2, self.bottomView.frame.size.width-iphoneX_margin*2, 1);
    self.loadFailedLabel.center = self.contentView.center;
}
#pragma mark
#pragma mark
/// App moved to the background. If background playback is enabled, detach
/// the player layer (audio keeps playing) and re-apply any custom rate;
/// otherwise pause and remember it was the system, not the user, who paused.
- (void)appDidEnterBackground:(NSNotification*)note{
    if (self.state==WMPlayerStateFinished) {
        return;
    }else if (self.state==WMPlayerStateStopped) {// already stopped — just clear the system-pause flag
        self.isPauseBySystem = NO;
    }else if(self.state==WMPlayerStatePlaying){
        if (self.enableBackgroundMode) {
            // Detaching the layer lets AVPlayer continue audio in background.
            self.playerLayer.player = nil;
            [self.playerLayer removeFromSuperlayer];
            if(![self.rateBtn.currentTitle isEqualToString:@""]){
                self.rate = [self.rateBtn.currentTitle floatValue];
            }
        }else{
            self.isPauseBySystem = YES;
            [self pause];
            self.state = WMPlayerStatePause;
        }
    }
}
/// Propagates the accent color to the seek slider and bottom progress bar.
-(void)setTintColor:(UIColor *)tintColor{
    _tintColor = tintColor;
    self.progressSlider.minimumTrackTintColor = tintColor;
    self.bottomProgress.progressTintColor = tintColor;
}
#pragma mark
#pragma mark
/// App returning to the foreground. If background mode detached the player
/// layer on the way out, rebuild it, reattach it behind the controls, resume
/// playback and re-apply any custom rate.
- (void)appWillEnterForeground:(NSNotification*)note{
    if (self.state==WMPlayerStateFinished) {
        if (self.enableBackgroundMode) {
            self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
            self.playerLayer.frame = self.contentView.bounds;
            self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
            [self.contentView.layer insertSublayer:self.playerLayer atIndex:0];
        }else{
            return;
        }
    }else if(self.state==WMPlayerStateStopped){
        return;
    }else if(self.state==WMPlayerStatePause){
        // Resume only if the system (not the user) paused us on backgrounding.
        if (self.isPauseBySystem) {
            self.isPauseBySystem = NO;
            [self play];
        }
    }else if(self.state==WMPlayerStatePlaying){
        if (self.enableBackgroundMode) {
            self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
            self.playerLayer.frame = self.contentView.bounds;
            self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
            [self.contentView.layer insertSublayer:self.playerLayer atIndex:0];
            [self.player play];
            if(![self.rateBtn.currentTitle isEqualToString:@""]){
                self.rate = [self.rateBtn.currentTitle floatValue];
            }
        }else{
            return;
        }
    }
}
//
/// Tap on the slider track: jump the slider (and playback position) to the
/// tapped point, then resume if the player was not running at normal rate.
// NOTE(review): the seek uses progressSlider.value directly as seconds and
// also assigns it to bottomProgress.progress (a 0..1 value) — this assumes
// the slider's maximumValue has been set elsewhere; confirm the units agree.
- (void)actionTapGesture:(UITapGestureRecognizer *)sender {
    CGPoint touchLocation = [sender locationInView:self.progressSlider];
    CGFloat value = (self.progressSlider.maximumValue - self.progressSlider.minimumValue) * (touchLocation.x/self.progressSlider.frame.size.width);
    [self.progressSlider setValue:value animated:YES];
    self.bottomProgress.progress = self.progressSlider.value;
    [self.player seekToTime:CMTimeMakeWithSeconds(self.progressSlider.value, self.currentItem.currentTime.timescale)];
    if (self.player.rate != 1.f) {
        self.playOrPauseBtn.selected = NO;
        [self.player play];
    }
}
//AirPlay
/// AirPlay route picker is about to be presented — log the active state.
- (void)routePickerViewWillBeginPresentingRoutes:(AVRoutePickerView *)routePickerView API_AVAILABLE(ios(11.0)){
    id airPlayActive = [routePickerView valueForKey:@"airPlayActive"];
    NSLog(@"AirPlay %@", airPlayActive);
}
//AirPlay
/// AirPlay route picker was dismissed — log the active state.
- (void)routePickerViewDidEndPresentingRoutes:(AVRoutePickerView *)routePickerView API_AVAILABLE(ios(11.0)){
    id airPlayActive = [routePickerView valueForKey:@"airPlayActive"];
    NSLog(@"AirPlay %@", airPlayActive);
}
/// Toggles Picture-in-Picture on the controller created in setupSuport.
-(void)pipAction:(UIButton *)sender{
    AVPictureInPictureController *pip = _AVPictureInPictureController;
    if (pip.pictureInPictureActive) {
        [pip stopPictureInPicture];
    } else {
        [pip startPictureInPicture];
    }
}
#pragma mark
#pragma mark -
/// Toggles the screen-lock state and notifies the delegate.
-(void)lockAction:(UIButton *)sender{
    sender.selected = !sender.selected;
    self.isLockScreen = sender.selected;
    // respondsToSelector: on a nil delegate returns NO, so no extra nil check needed.
    if ([self.delegate respondsToSelector:@selector(wmplayer:clickedLockButton:)]) {
        [self.delegate wmplayer:self clickedLockButton:sender];
    }
}
#pragma mark
#pragma mark - func
/// Fullscreen button tapped — the delegate performs the actual rotation.
-(void)fullScreenAction:(UIButton *)sender{
    sender.selected = !sender.selected;
    if ([self.delegate respondsToSelector:@selector(wmplayer:clickedFullScreenButton:)]) {
        [self.delegate wmplayer:self clickedFullScreenButton:sender];
    }
}
#pragma mark
#pragma mark - func
/// Back/close button tapped — forwarded to the delegate.
/// (Selector name keeps its historical typo; it is wired as a button target.)
-(void)colseTheVideo:(UIButton *)sender{
    if ([self.delegate respondsToSelector:@selector(wmplayer:clickedCloseButton:)]) {
        [self.delegate wmplayer:self clickedCloseButton:sender];
    }
}
//
/// Total duration of the current item in seconds, or 0 until it is ready.
- (double)duration{
    AVPlayerItem *item = self.player.currentItem;
    if (item.status != AVPlayerItemStatusReadyToPlay) {
        return 0.f;
    }
    return CMTimeGetSeconds(item.asset.duration);
}
//
/// Current playback position in seconds, or 0 before the player exists.
- (double)currentTime{
    return self.player ? CMTimeGetSeconds(self.player.currentTime) : 0.0;
}
#pragma mark
#pragma mark - PlayOrPause
/// Play/pause button handler. For stopped/failed states it (re)starts
/// playback; for playing it pauses; for paused/finished it resumes by
/// re-applying the rate (setRate: side-effects into the playing state).
- (void)playOrPause:(UIButton *)sender{
    if (self.state==WMPlayerStateStopped||self.state==WMPlayerStateFailed) {
        [self play];
        if(![self.rateBtn.currentTitle isEqualToString:@""]){
            self.rate = [self.rateBtn.currentTitle floatValue];
        }else{
            self.rate = 1.0f;
        }
    } else if(self.state==WMPlayerStatePlaying){
        [self pause];
    }else if(self.state ==WMPlayerStateFinished){
        // Resuming after finish: setting rate restarts the player at speed.
        if(![self.rateBtn.currentTitle isEqualToString:@""]){
            self.rate = [self.rateBtn.currentTitle floatValue];
        }else{
            self.rate = 1.0f;
        }
    }else if(self.state==WMPlayerStatePause){
        if(![self.rateBtn.currentTitle isEqualToString:@""]){
            self.rate = [self.rateBtn.currentTitle floatValue];
        }else{
            self.rate = 1.0f;
        }
    }
    if ([self.delegate respondsToSelector:@selector(wmplayer:clickedPlayOrPauseButton:)]) {
        [self.delegate wmplayer:self clickedPlayOrPauseButton:sender];
    }
}
//
/// Starts (or resumes) playback. Builds the AVPlayer stack lazily on the
/// first call; afterwards only resumes from stopped/paused states.
-(void)play{
    if (self.isInitPlayer == NO) {
        [self creatWMPlayerAndReadyToPlay];
        self.playOrPauseBtn.selected = NO;
    }else{
        if (self.state==WMPlayerStateStopped||self.state ==WMPlayerStatePause) {
            self.state = WMPlayerStatePlaying;
            self.playOrPauseBtn.selected = NO;
            [self.player play];
        }else if(self.state ==WMPlayerStateFinished){
            // Finished items are restarted via playOrPause:/setRate:, not here.
            NSLog(@"fffff");
        }
    }
}
//
/// Pauses playback and flips the button to the "play" glyph.
// NOTE(review): a playing player is moved to WMPlayerStateStopped here, not
// WMPlayerStatePause — appDidEnterBackground: relies on this distinction to
// tell user-pauses from system-pauses, so confirm before "fixing".
-(void)pause{
    if (self.state==WMPlayerStatePlaying) {
        self.state = WMPlayerStateStopped;
    }
    [self.player pause];
    self.playOrPauseBtn.selected = YES;
}
/// Shows/hides the AirPlay route picker in the top bar.
-(void)setEnableAirPlay:(BOOL)enableAirPlay{
    self.airPlayView.hidden = !enableAirPlay;
    _enableAirPlay = enableAirPlay;
}
/// Stores the desired status-bar visibility; the hosting view controller is
/// expected to read this when resolving prefersStatusBarHidden.
-(void)setPrefersStatusBarHidden:(BOOL)prefersStatusBarHidden{
    _prefersStatusBarHidden = prefersStatusBarHidden;
}
#pragma mark
#pragma mark -
/// Single tap on the content view. When the screen is locked, only the lock
/// button (and status bar) toggles, auto-hiding after 5s. When unlocked, the
/// top/bottom control bars fade in or out.
- (void)handleSingleTap:(UITapGestureRecognizer *)sender{
    if (self.isLockScreen) {
        if (self.lockBtn.alpha) {
            self.lockBtn.alpha = 0.0;
            self.prefersStatusBarHidden = self.hiddenStatusBar = YES;
        }else{
            self.lockBtn.alpha = 1.0;
            self.prefersStatusBarHidden = self.hiddenStatusBar = NO;
            // Restart the 5-second auto-hide countdown for the lock button.
            [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(hiddenLockBtn) object:nil];
            [self performSelector:@selector(hiddenLockBtn) withObject:nil afterDelay:5.0];
        }
    }else{
        // Unlocked: nothing extra to do before notifying the delegate.
    }
    if (self.delegate&&[self.delegate respondsToSelector:@selector(wmplayer:singleTaped:)]) {
        [self.delegate wmplayer:self singleTaped:sender];
    }
    // Locked screens never toggle the full control bars.
    if (self.isLockScreen) {
        return;
    }
    [self dismissControlView];
    [UIView animateWithDuration:0.5 animations:^{
        if (self.bottomView.alpha == 0.0) {
            [self showControlView];
        }else{
            [self hiddenControlView];
        }
    } completion:^(BOOL finish){
    }];
}
#pragma mark
#pragma mark -
/// Double tap on the content view — forwarded to the delegate.
- (void)handleDoubleTap:(UITapGestureRecognizer *)doubleTap{
    if ([self.delegate respondsToSelector:@selector(wmplayer:doubleTaped:)]) {
        [self.delegate wmplayer:self doubleTaped:doubleTap];
    }
}
/// Swaps the observed AVPlayerItem: removes all KVO observers and the
/// did-play-to-end notification from the old item, installs them on the new
/// one, and hands the new item to the player. The remove-before-add order is
/// load-bearing — removing an observer that was never added throws.
-(void)setCurrentItem:(AVPlayerItem *)playerItem{
    if (_currentItem==playerItem) {
        return;
    }
    if (_currentItem) {
        [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:_currentItem];
        [_currentItem removeObserver:self forKeyPath:@"status"];
        [_currentItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
        [_currentItem removeObserver:self forKeyPath:@"playbackBufferEmpty"];
        [_currentItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"];
        [_currentItem removeObserver:self forKeyPath:@"duration"];
        [_currentItem removeObserver:self forKeyPath:@"presentationSize"];
        _currentItem = nil;
    }
    _currentItem = playerItem;
    if (_currentItem) {
        [_currentItem addObserver:self
                       forKeyPath:@"status"
                          options:NSKeyValueObservingOptionNew
                          context:PlayViewStatusObservationContext];
        [_currentItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:PlayViewStatusObservationContext];
        // Fires when buffering stalls.
        [_currentItem addObserver:self forKeyPath:@"playbackBufferEmpty" options: NSKeyValueObservingOptionNew context:PlayViewStatusObservationContext];
        // Fires when enough is buffered to keep playing.
        [_currentItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options: NSKeyValueObservingOptionNew context:PlayViewStatusObservationContext];
        [_currentItem addObserver:self forKeyPath:@"duration" options:NSKeyValueObservingOptionNew context:PlayViewStatusObservationContext];
        [_currentItem addObserver:self forKeyPath:@"presentationSize" options:NSKeyValueObservingOptionNew context:PlayViewStatusObservationContext];
        [self.player replaceCurrentItemWithPlayerItem:_currentItem];
        // End-of-playback notification for this specific item.
        [[NSNotificationCenter defaultCenter]addObserver:self selector:@selector(moviePlayDidEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:_currentItem];
    }
}
/// Creates the Picture-in-Picture controller when the device supports it.
-(void)setupSuport {
    if (![AVPictureInPictureController isPictureInPictureSupported]) {
        // not supported PIP start button desable here
        return;
    }
    _AVPictureInPictureController = [[AVPictureInPictureController alloc] initWithPlayerLayer:self.playerLayer];
    _AVPictureInPictureController.delegate = self;
}
//
/// Mirrors the mute flag onto the underlying AVPlayer.
- (void)setMuted:(BOOL)muted{
    self.player.muted = muted;
    _muted = muted;
}
//playerLayer
/// Maps the WMPlayerLayerGravity enum to the corresponding
/// AVLayerVideoGravity string and applies it to the layer; unknown enum
/// values leave the gravity untouched, matching the original switch.
- (void)setPlayerLayerGravity:(WMPlayerLayerGravity)playerLayerGravity {
    _playerLayerGravity = playerLayerGravity;
    NSString *gravity = nil;
    switch (playerLayerGravity) {
        case WMPlayerLayerGravityResize:
            gravity = AVLayerVideoGravityResize;
            break;
        case WMPlayerLayerGravityResizeAspect:
            gravity = AVLayerVideoGravityResizeAspect;
            break;
        case WMPlayerLayerGravityResizeAspectFill:
            gravity = AVLayerVideoGravityResizeAspectFill;
            break;
        default:
            break;
    }
    if (gravity) {
        self.playerLayer.videoGravity = gravity;
        self.videoGravity = gravity;
    }
}
/// Locking hides the status bar and all controls; unlocking cancels the
/// lock-button auto-hide timer, shows the controls, then restarts the
/// auto-dismiss countdown.
-(void)setIsLockScreen:(BOOL)isLockScreen{
    _isLockScreen = isLockScreen;
    self.prefersStatusBarHidden = self.hiddenStatusBar = isLockScreen;
    if (isLockScreen) {
        [self hiddenControlView];
    }else{
        [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(hiddenLockBtn) object:nil];
        [self showControlView];
        [self dismissControlView];
    }
}
//playerModelsetter
/// Configures the player from a model: title, initial seek position, and
/// either a ready-made AVPlayerItem or a URL. A supplied playerItem takes
/// precedence over the URL. Moves to buffering only once the AVPlayer stack
/// exists (setCurrentItem: hands the item to the live player).
-(void)setPlayerModel:(WMPlayerModel *)playerModel{
    if (_playerModel==playerModel) {
        return;
    }
    _playerModel = playerModel;
    self.isPauseBySystem = NO;
    self.seekTime = playerModel.seekTime;
    self.titleLabel.text = playerModel.title;
    if(playerModel.playerItem){
        self.currentItem = playerModel.playerItem;
    }else{
        self.videoURL = playerModel.videoURL;
    }
    if (self.isInitPlayer) {
        self.state = WMPlayerStateBuffering;
    }else{
        self.state = WMPlayerStateStopped;
        [self.loadingView stopAnimating];
    }
}
/// Lazily builds the AVPlayer stack: background/foreground observers, the
/// player (from the existing item or from videoURL), loop behavior, the
/// player layer inserted behind the controls, periodic time observation and
/// PiP support — then starts playback.
-(void)creatWMPlayerAndReadyToPlay{
    self.isInitPlayer = YES;
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appWillEnterForeground:) name:UIApplicationWillEnterForegroundNotification object:nil];
    // Build the player from whichever source the model provided.
    if(self.currentItem){
        self.player = [AVPlayer playerWithPlayerItem:self.currentItem];
    }else{
        self.urlAsset = [AVURLAsset assetWithURL:self.videoURL];
        self.currentItem = [AVPlayerItem playerItemWithAsset:self.urlAsset];
        self.player = [AVPlayer playerWithPlayerItem:self.currentItem];
    }
    if(self.loopPlay){
        self.player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    }else{
        self.player.actionAtItemEnd = AVPlayerActionAtItemEndPause;
    }
    // iOS 10+: start immediately instead of waiting to minimize stalls.
    if ([self.player respondsToSelector:@selector(automaticallyWaitsToMinimizeStalling)]) {
        self.player.automaticallyWaitsToMinimizeStalling = NO;
    }
    self.player.usesExternalPlaybackWhileExternalScreenIsActive=YES;
    // Render layer sits behind every control subview (index 0).
    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    // Gravity defaults to aspect-fit via the videoGravity lazy getter.
    self.playerLayer.frame = self.contentView.layer.bounds;
    self.playerLayer.videoGravity = self.videoGravity;
    [self.contentView.layer insertSublayer:self.playerLayer atIndex:0];
    self.state = WMPlayerStateBuffering;
    // Install the periodic playback-time observer.
    [self initTimer];
    [self.player play];
    // Picture-in-Picture needs the player layer, so set it up last.
    [self setupSuport];
}
/// YES for iPhone-X-class devices, detected by screen aspect ratio:
/// 896/414 (XR/XS Max class) or 812/375 (X/XS class), within 1% tolerance.
+(BOOL)IsiPhoneX{
    if (UIDevice.currentDevice.userInterfaceIdiom != UIUserInterfaceIdiomPhone) {
        return NO;
    }
    CGRect screenBounds = [UIScreen mainScreen].bounds;
    CGFloat longSide = MAX(CGRectGetWidth(screenBounds), CGRectGetHeight(screenBounds));
    CGFloat shortSide = MIN(CGRectGetWidth(screenBounds), CGRectGetHeight(screenBounds));
    CGFloat aspect = longSide / shortSide;
    if (ABS(aspect - 896 / 414.0) < 0.01 || ABS(aspect - 812 / 375.0) < 0.01) {
        return YES;
    }
    return NO;
}
//
/// Entering fullscreen shows the rate/lock buttons, hides the fullscreen
/// button, and stretches the frame to the long screen orientation; exiting
/// restores the remembered inline frame (see setFrame:).
-(void)setIsFullscreen:(BOOL)isFullscreen{
    _isFullscreen = isFullscreen;
    self.rateBtn.hidden = self.lockBtn.hidden = !isFullscreen;
    self.fullScreenBtn.hidden = self.fullScreenBtn.selected= isFullscreen;
    if (isFullscreen) {
        self.backBtnStyle = BackBtnStylePop;
        CGFloat w = [UIScreen mainScreen].bounds.size.width;
        CGFloat h = [UIScreen mainScreen].bounds.size.height;
        // Landscape-shaped frame regardless of the current orientation report.
        self.frame = CGRectMake(0, 0, MAX(w, h), MIN(w, h));
        self.bottomProgress.alpha = self.isLockScreen?1.0f:0.f;
    }else{
        self.bottomProgress.alpha = 0.0;
        self.frame = self.originFrame;
    }
}
/// Picks the back-button artwork: a "pop" chevron, a close glyph, or no
/// image at all (any other style), applied to both control states.
-(void)setBackBtnStyle:(BackBtnStyle)backBtnStyle{
    _backBtnStyle = backBtnStyle;
    UIImage *image = nil;
    if (backBtnStyle==BackBtnStylePop) {
        image = WMPlayerImage(@"player_icon_nav_back.png");
    } else if (backBtnStyle==BackBtnStyleClose) {
        image = WMPlayerImage(@"close.png");
    }
    [self.backBtn setImage:image forState:UIControlStateNormal];
    [self.backBtn setImage:image forState:UIControlStateSelected];
}
/// Hiding the control bars also requests hiding the status bar.
-(void)setIsHiddenTopAndBottomView:(BOOL)isHiddenTopAndBottomView{
    self.prefersStatusBarHidden = isHiddenTopAndBottomView;
    _isHiddenTopAndBottomView = isHiddenTopAndBottomView;
}
/// Toggles looping by switching the player's end-of-item action
/// (none = restart handled elsewhere, pause = stop at the end).
-(void)setLoopPlay:(BOOL)loopPlay{
    _loopPlay = loopPlay;
    if (!self.player) {
        return;
    }
    self.player.actionAtItemEnd = loopPlay ? AVPlayerActionAtItemEndNone
                                           : AVPlayerActionAtItemEndPause;
}
//
/// Stores the playback state; the buffering spinner runs only while
/// buffering (every other state stops it, exactly as before).
- (void)setState:(WMPlayerState)state{
    _state = state;
    if (state == WMPlayerStateBuffering) {
        [self.loadingView startAnimating];
    } else {
        [self.loadingView stopAnimating];
    }
}
#pragma mark
#pragma mark--
/// Item played to its end: notify the delegate, rewind to zero exactly, then
/// (unless looping) settle into the finished state shortly after — the delay
/// lets the rewind's UI updates land first.
- (void)moviePlayDidEnd:(NSNotification *)notification {
    if (self.delegate&&[self.delegate respondsToSelector:@selector(wmplayerFinishedPlay:)]) {
        [self.delegate wmplayerFinishedPlay:self];
    }
    // Zero tolerance = frame-accurate rewind to the very start.
    [self.player seekToTime:kCMTimeZero toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:^(BOOL finished) {
        if (finished) {
            if (self.isLockScreen) {
                // Unlock the screen so the user can reach the controls again.
                [self lockAction:self.lockBtn];
            }else{
                [self showControlView];
            }
            if(!self.loopPlay){
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    self.state = WMPlayerStateFinished;
                    self.bottomProgress.progress = 0;
                    self.playOrPauseBtn.selected = YES;
                });
            }
        }
    }];
}
//view
/// Fade the top/bottom bars and the lock button back in, hide the thin
/// bottom progress strip, and notify the delegate that the chrome is
/// visible again. Mirror image of -hiddenControlView.
- (void)showControlView {
    [UIView animateWithDuration:0.5 animations:^{
        self.bottomView.alpha = 1.0;
        self.topView.alpha = 1.0;
        self.lockBtn.alpha = 1.0;
        self.bottomProgress.alpha = 0.f;
        self.isHiddenTopAndBottomView = NO;
        BOOL wantsCallback = self.delegate && [self.delegate respondsToSelector:@selector(wmplayer:isHiddenTopAndBottomView:)];
        if (wantsCallback) {
            [self.delegate wmplayer:self isHiddenTopAndBottomView:self.isHiddenTopAndBottomView];
        }
    } completion:nil];
}
/// Hide the lock button and the status bar once the lock-screen auto-hide
/// delay elapses, then forward the single-tap callback so observers can
/// react to the chrome change. (respondsToSelector: on a nil delegate
/// returns NO, so no explicit nil check is needed.)
- (void)hiddenLockBtn {
    self.lockBtn.alpha = 0.0;
    self.hiddenStatusBar = YES;
    self.prefersStatusBarHidden = YES;
    if ([self.delegate respondsToSelector:@selector(wmplayer:singleTaped:)]) {
        [self.delegate wmplayer:self singleTaped:self.singleTap];
    }
}
//view
// Fade the control chrome out. While the screen is locked the thin bottom
// progress strip stays visible and the lock button auto-hides after 5s;
// otherwise everything is hidden at once. Mirror image of -showControlView.
-(void)hiddenControlView{
    [UIView animateWithDuration:0.5 animations:^{
        self.bottomView.alpha = 0.0;
        self.topView.alpha = 0.0;
        if (self.isLockScreen) {
            self.bottomProgress.alpha = 1.0;
            // Schedule the lock button to disappear 5s from now, cancelling
            // any previously pending request first.
            [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(hiddenLockBtn) object:nil];
            [self performSelector:@selector(hiddenLockBtn) withObject:nil afterDelay:5.0];
        }else{
            self.lockBtn.alpha = 0.0;
            self.bottomProgress.alpha = 0.f;
        }
        self.isHiddenTopAndBottomView = YES;
        if (self.delegate&&[self.delegate respondsToSelector:@selector(wmplayer:isHiddenTopAndBottomView:)]) {
            [self.delegate wmplayer:self isHiddenTopAndBottomView:self.isHiddenTopAndBottomView];
        }
    } completion:^(BOOL finish){
    }];
}
// Override of UIView's -addSubview:. Besides adding the view, it records
// the subview in self.parentView.
// NOTE(review): parentView therefore always points at the *most recently
// added subview*, which the name does not suggest — confirm this is the
// intended semantics before relying on it.
-(void)addSubview:(UIView *)view{
    [super addSubview:view];
    self.parentView = view;
}
#pragma mark
#pragma mark--sidle
// Slider touch-down handler: flag that the user has started dragging so
// the periodic time observer stops moving the thumb (see -syncScrubber;
// dragingSliderStatus: 0 = idle, 1 = dragging, 2 = seek pending).
// NOTE(review): "strat" is a typo for "start", but the selector is wired
// externally, so renaming needs a coordinated change.
- (void)stratDragSlide:(UISlider *)slider{
    self.dragingSliderStatus = 1;
}
#pragma mark
#pragma mark -
/// Slider touch-up handler: seek the player to the scrubbed position.
/// On completion the drag status moves to 2 ("seek pending"), which
/// -syncScrubber consumes on its next tick.
- (void)updateProgress:(UISlider *)slider {
    CMTimeScale scale = self.currentItem.currentTime.timescale;
    CMTime target = CMTimeMakeWithSeconds(slider.value, scale);
    [self.player seekToTime:target
            toleranceBefore:kCMTimeZero
             toleranceAfter:kCMTimeZero
          completionHandler:^(BOOL finished) {
              self.dragingSliderStatus = 2;
          }];
}
/// (Re)start the 5-second auto-hide countdown for the control chrome,
/// cancelling any previously scheduled hide first.
- (void)dismissControlView {
    SEL hideSelector = @selector(autoDismissControlView);
    [NSObject cancelPreviousPerformRequestsWithTarget:self selector:hideSelector object:nil];
    [self performSelector:hideSelector withObject:nil afterDelay:5.0];
}
#pragma mark
#pragma mark KVO
// Central KVO sink for the observed AVPlayerItem key paths (status,
// duration, presentationSize, loadedTimeRanges, playbackBufferEmpty,
// playbackLikelyToKeepUp), all registered with
// PlayViewStatusObservationContext.
// NOTE(review): unhandled contexts are silently dropped instead of being
// forwarded to super — confirm no superclass relies on KVO callbacks.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{
    /* AVPlayerItem "status" property value observer. */
    if (context == PlayViewStatusObservationContext){
        if ([keyPath isEqualToString:@"status"]) {
            AVPlayerItemStatus status = [[change objectForKey:NSKeyValueChangeNewKey] integerValue];
            switch (status){
                    // Item not ready yet: show the buffering UI from scratch.
                case AVPlayerItemStatusUnknown:{
                    [self.loadingProgress setProgress:0.0 animated:NO];
                    self.state = WMPlayerStateBuffering;
                    [self.loadingView startAnimating];
                }
                    break;
                case AVPlayerItemStatusReadyToPlay:{
                    /* Once the AVPlayerItem becomes ready to play, i.e.
                     [playerItem status] == AVPlayerItemStatusReadyToPlay,
                     its duration can be fetched from the item. */
                    if (self.state==WMPlayerStateStopped||self.state==WMPlayerStatePause) {
                    }else{
                        // Kick off the 5s auto-hide countdown and mark playing.
                        [self dismissControlView];
                        self.state=WMPlayerStatePlaying;
                    }
                    if (self.delegate&&[self.delegate respondsToSelector:@selector(wmplayerReadyToPlay:WMPlayerStatus:)]) {
                        [self.delegate wmplayerReadyToPlay:self WMPlayerStatus:WMPlayerStatePlaying];
                    }
                    [self.loadingView stopAnimating];
                    // Honour a seek position requested before the item was ready.
                    if (self.seekTime) {
                        [self seekToTimeToPlay:self.seekTime];
                    }
                    if (self.muted) {
                        self.player.muted = self.muted;
                    }
                    // Re-apply the user's selected playback rate, if any.
                    if (self.state==WMPlayerStateStopped||self.state==WMPlayerStatePause) {
                    }else{
                        if(![self.rateBtn.currentTitle isEqualToString:@""]){
                            self.rate = [self.rateBtn.currentTitle floatValue];
                        }
                    }
                }
                    break;
                case AVPlayerItemStatusFailed:{
                    self.state = WMPlayerStateFailed;
                    if (self.delegate&&[self.delegate respondsToSelector:@selector(wmplayerFailedPlay:WMPlayerStatus:)]) {
                        [self.delegate wmplayerFailedPlay:self WMPlayerStatus:WMPlayerStateFailed];
                    }
                    NSError *error = [self.player.currentItem error];
                    if (error) {
                        // Surface the failure label and stop the spinner.
                        self.loadFailedLabel.hidden = NO;
                        [self bringSubviewToFront:self.loadFailedLabel];
                        [self.loadingView stopAnimating];
                    }
                    NSLog(@"===%@",error.description);
                }
                    break;
            }
        }else if ([keyPath isEqualToString:@"duration"]) {
            // Cache the total duration once it changes; NaN (live streams)
            // falls back to MAXFLOAT so later divisions stay finite.
            if ((CGFloat)CMTimeGetSeconds(self.currentItem.duration) != self.totalTime) {
                self.totalTime = (CGFloat) CMTimeGetSeconds(self.currentItem.asset.duration);
                if (!isnan(self.totalTime)) {
                    self.progressSlider.maximumValue = self.totalTime;
                }else{
                    self.totalTime = MAXFLOAT;
                }
                if (self.state==WMPlayerStateStopped||self.state==WMPlayerStatePause) {
                }else{
                    self.state = WMPlayerStatePlaying;
                }
            }
        }else if ([keyPath isEqualToString:@"presentationSize"]) {
            // Forward the decoded video size to the delegate.
            self.playerModel.presentationSize = self.currentItem.presentationSize;
            if (self.delegate&&[self.delegate respondsToSelector:@selector(wmplayerGotVideoSize:videoSize:)]) {
                [self.delegate wmplayerGotVideoSize:self videoSize:self.playerModel.presentationSize];
            }
        }else if ([keyPath isEqualToString:@"loadedTimeRanges"]) {
            // Update the buffering bar with the furthest buffered position.
            NSTimeInterval timeInterval = [self availableDuration];
            CMTime duration = self.currentItem.duration;
            CGFloat totalDuration = CMTimeGetSeconds(duration);
            self.loadingProgress.progressTintColor = [UIColor colorWithRed:1.0 green:1.0 blue:1.0 alpha:0.7];
            [self.loadingProgress setProgress:timeInterval / totalDuration animated:NO];
        } else if ([keyPath isEqualToString:@"playbackBufferEmpty"]) {
            [self.loadingView startAnimating];
            // Buffer ran dry: enter the buffering-recovery path.
            if (self.currentItem.playbackBufferEmpty) {
                NSLog(@"%s WMPlayerStateBuffering",__FUNCTION__);
                [self loadedTimeRanges];
            }
        }else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) {
            [self.loadingView stopAnimating];
            // Enough data buffered again: resume the playing state.
            if (self.currentItem.playbackLikelyToKeepUp && self.state == WMPlayerStateBuffering){
                NSLog(@"55555%s WMPlayerStatePlaying",__FUNCTION__);
                if (self.state==WMPlayerStateStopped||self.state==WMPlayerStatePause) {
                }else{
                    self.state = WMPlayerStatePlaying;
                }
            }
        }
    }
}
//
// Buffering-recovery helper invoked when playbackBufferEmpty fires: mark
// the buffering state (unless the user paused) and retry playback after
// 5 seconds if it has not resumed by itself.
// NOTE(review): despite the name, this is unrelated to AVPlayerItem's
// loadedTimeRanges property — it is only called from the KVO handler.
- (void)loadedTimeRanges{
    if (self.state==WMPlayerStatePause) {
    }else{
        self.state = WMPlayerStateBuffering;
    }
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        // Only force playback if we are still stuck (not playing/finished).
        if (self.state==WMPlayerStatePlaying||self.state==WMPlayerStateFinished) {
        }else{
            [self play];
        }
        [self.loadingView stopAnimating];
    });
}
#pragma mark
#pragma mark autoDismissControlView
/// Target of the auto-hide countdown scheduled by -dismissControlView;
/// simply hides the control chrome.
- (void)autoDismissControlView {
    [self hiddenControlView];
}
#pragma mark -
// Install a 1 Hz periodic time observer on the player that drives the
// scrubber UI. self is captured weakly because the player (owned by self)
// retains the observer block — a strong capture would create a cycle.
-(void)initTimer{
    __weak typeof(self) weakSelf = self;
    self.playbackTimeObserver = [self.player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(1.0, NSEC_PER_SEC) queue:dispatch_get_main_queue() /* If you pass NULL, the main queue is used. */
                                                                     usingBlock:^(CMTime time){
                                                                         [weakSelf syncScrubber];
                                                                     }];
}
// Periodic-observer callback: refresh the time labels and, depending on
// the drag state, the slider thumb and the thin bottom progress bar.
// dragingSliderStatus: 1 = user dragging (leave the thumb alone),
// 2 = a seek just completed (consume it once), 0 = normal tracking.
- (void)syncScrubber{
    CMTime playerDuration = [self playerItemDuration];
    CGFloat totalTime = (CGFloat)CMTimeGetSeconds(playerDuration);
    long long nowTime = self.currentItem.currentTime.value/self.currentItem.currentTime.timescale;
    // Fullscreen shows "current/total" on the left; windowed splits them.
    if (self.isFullscreen) {
        self.leftTimeLabel.text = [NSString stringWithFormat:@"%@/%@",[self convertTime:nowTime],[self convertTime:self.totalTime]];
        self.rightTimeLabel.text = @"";
    }else{
        self.leftTimeLabel.text = [self convertTime:nowTime];
        self.rightTimeLabel.text = ([self convertTime:self.totalTime]);
    }
    // Live streams report NaN duration — blank the total label then.
    if (isnan(totalTime)) {
        self.rightTimeLabel.text = @"";
        NSLog(@"NaN");
    }
    if (self.dragingSliderStatus==1) {// dragging: the slider owns the thumb
    }else if(self.dragingSliderStatus==2){
        // Seek pending: adopt the slider's value once, then go back to idle.
        // NOTE(review): nowTime is overwritten with the slider value here and
        // then scaled again below — looks intentional but double-check.
        nowTime = self.progressSlider.value;
        CGFloat value = (self.progressSlider.maximumValue - self.progressSlider.minimumValue) * nowTime / self.totalTime + self.progressSlider.minimumValue;
        self.progressSlider.value = value;
        [self.bottomProgress setProgress:nowTime/(self.totalTime) animated:NO];
        self.dragingSliderStatus = 0;
    }else if(self.dragingSliderStatus==0){
        // Normal tracking: move thumb and bottom strip to the current time.
        CGFloat value = (self.progressSlider.maximumValue - self.progressSlider.minimumValue) * nowTime / self.totalTime + self.progressSlider.minimumValue;
        self.progressSlider.value = value;
        [self.bottomProgress setProgress:nowTime/(self.totalTime) animated:YES];
    }
}
//seekTimetime
/// Seek playback to `seekTime` (seconds). Out-of-range values (negative or
/// >= totalTime) are clamped to 0, matching the original behavior. Only
/// acts when the current item has reached AVPlayerItemStatusReadyToPlay.
- (void)seekToTimeToPlay:(double)seekTime{
    if (self.player && self.player.currentItem.status == AVPlayerItemStatusReadyToPlay) {
        if (seekTime >= self.totalTime || seekTime < 0) {
            seekTime = 0.0;
        }
        /* A timescale of 1 means you can only specify whole seconds to seek to. The timescale is the number of parts per second. Use 600 for video, as Apple recommends, since it is a product of the common video frame rates like 50, 60, 25 and 24 frames per second*/
        // Fix: the previous code passed currentTime.timescale here, which can
        // be 1 for some streams and rounds every seek to whole seconds — the
        // exact problem the comment above warns about. Use 600 as recommended.
        [self.player seekToTime:CMTimeMakeWithSeconds(seekTime, 600)
                toleranceBefore:kCMTimeZero
                 toleranceAfter:kCMTimeZero
              completionHandler:^(BOOL finished) {
            // Clear any stored pre-ready seek request.
            self.seekTime = 0;
        }];
    }
}
/// Duration of the current item, or kCMTimeInvalid until the item reaches
/// AVPlayerItemStatusReadyToPlay.
- (CMTime)playerItemDuration {
    AVPlayerItem *item = self.currentItem;
    return (item.status == AVPlayerItemStatusReadyToPlay) ? item.duration : kCMTimeInvalid;
}
/// Format a duration in seconds as "mm:ss", or "HH:mm:ss" once it reaches
/// one hour. Non-finite or negative input renders as "00:00".
///
/// Fix: the previous implementation formatted an NSDate built from the
/// duration with an NSDateFormatter that used the *local* time zone, so for
/// videos >= 1 hour the hour field was shifted by the zone's GMT offset
/// (e.g. a 1-hour mark rendered as "09:00:00" in UTC+8). Plain integer
/// arithmetic removes the time-zone dependency (and the per-call mutation
/// of the shared date formatter's format string).
- (NSString *)convertTime:(float)second{
    // Guard NaN/inf/negative input (totalTime is set to MAXFLOAT for live
    // streams) — casting such values to an integer is undefined behavior.
    if (!isfinite(second) || second < 0 || second > (float)INT32_MAX) {
        second = 0;
    }
    long long total = (long long)second;
    long long hours = total / 3600;
    long long minutes = (total % 3600) / 60;
    long long seconds = total % 60;
    if (hours >= 1) {
        return [NSString stringWithFormat:@"%02lld:%02lld:%02lld", hours, minutes, seconds];
    }
    return [NSString stringWithFormat:@"%02lld:%02lld", minutes, seconds];
}
//
/// Furthest buffered position of the current item, in seconds: the start
/// of the first loaded range plus its length. Yields 0 while nothing is
/// buffered yet (messaging a nil NSValue returns a zero CMTimeRange).
- (NSTimeInterval)availableDuration {
    NSValue *firstRange = [_currentItem loadedTimeRanges].firstObject;
    CMTimeRange bufferedRange = [firstRange CMTimeRangeValue];
    float startSeconds = CMTimeGetSeconds(bufferedRange.start);
    float lengthSeconds = CMTimeGetSeconds(bufferedRange.duration);
    return startSeconds + lengthSeconds;
}
#pragma mark
#pragma mark - touches
// Begin a pan gesture over the player surface. Records the baseline
// values (slider position, touch point, brightness, volume) that the
// move/end handlers interpolate from. Multi-touch, multi-tap, and touches
// on views other than self/contentView are ignored.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event{
    UITouch * touch = (UITouch *)touches.anyObject;
    if (touches.count > 1 || [touch tapCount] > 1 || event.allTouches.count > 1) {
        return;
    }
    // Only react to touches landing on the player itself or its contentView.
    if (![[(UITouch *)touches.anyObject view] isEqual:self.contentView] && ![[(UITouch *)touches.anyObject view] isEqual:self]) {
        return;
    }
    [super touchesBegan:touches withEvent:event];
    // Reset per-gesture state.
    self.hasMoved = NO;
    self.touchBeginValue = self.progressSlider.value;
    // Starting touch location.
    self.touchBeginPoint = [touches.anyObject locationInView:self];
    // Screen brightness at gesture start.
    self.touchBeginLightValue = [UIScreen mainScreen].brightness;
    // System volume at gesture start.
    self.touchBeginVoiceValue = self.volumeSlider.value;
}
// Classify the pan by its slope (horizontal = scrub, steep left half =
// brightness, steep right half = volume) and apply the matching control.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event{
    UITouch * touch = (UITouch *)touches.anyObject;
    if (touches.count > 1 || [touch tapCount] > 1 || event.allTouches.count > 1) {
        return;
    }
    if (![[(UITouch *)touches.anyObject view] isEqual:self.contentView] && ![[(UITouch *)touches.anyObject view] isEqual:self]) {
        return;
    }
    [super touchesMoved:touches withEvent:event];
    // Ignore jitter below the minimum recognition distance.
    CGPoint tempPoint = [touches.anyObject locationInView:self];
    if (fabs(tempPoint.x - self.touchBeginPoint.x) < LeastDistance && fabs(tempPoint.y - self.touchBeginPoint.y) < LeastDistance) {
        return;
    }
    self.hasMoved = YES;
    // Slope of the pan relative to the start point.
    float tan = fabs(tempPoint.y - _touchBeginPoint.y)/fabs(tempPoint.x - self.touchBeginPoint.x);
    if (tan < 1/sqrt(3)) { // flatter than 30°: horizontal scrub
        self.controlType = WMControlTypeProgress;
    }else if(tan > sqrt(3)){ // steeper than 60°: vertical control
        // Left half adjusts brightness, right half adjusts volume.
        if (self.touchBeginPoint.x < self.bounds.size.width/2) {
            self.controlType = WMControlTypeLight;
        }else{
            self.controlType = WMControlTypeVoice;
        }
    }else{ // ambiguous diagonal: do nothing
        self.controlType = WMControlTypeDefault;
        return;
    }
    if (self.controlType == WMControlTypeProgress) { // scrubbing
        if (self.enableFastForwardGesture) {
            float value = [self moveProgressControllWithTempPoint:tempPoint];
            [self timeValueChangingWithValue:value];
        }
    }else if(self.controlType == WMControlTypeVoice){ // volume
        if (self.isFullscreen) {// fullscreen only
            if (self.enableVolumeGesture) {
                // Full view height maps to the full volume range.
                float voiceValue = self.touchBeginVoiceValue - ((tempPoint.y - self.touchBeginPoint.y)/self.bounds.size.height);
                // Clamp to [0, 1].
                if (voiceValue < 0) {
                    self.volumeSlider.value = 0;
                }else if(voiceValue > 1){
                    self.volumeSlider.value = 1;
                }else{
                    self.volumeSlider.value = voiceValue;
                }
            }
        }else{
            return;
        }
    }else if(self.controlType == WMControlTypeLight){ // brightness
        if (self.isFullscreen) {
            // Full view height maps to the full brightness range.
            float tempLightValue = self.touchBeginLightValue - ((tempPoint.y - _touchBeginPoint.y)/self.bounds.size.height);
            if (tempLightValue < 0) {
                tempLightValue = 0;
            }else if(tempLightValue > 1){
                tempLightValue = 1;
            }
            // Apply immediately; the system renders its own brightness HUD.
            [UIScreen mainScreen].brightness = tempLightValue;
            NSLog(@" = %f",tempLightValue);
            [self.contentView bringSubviewToFront:self.lightView];
        }else{
        }
    }
}
// Gesture interrupted: if a scrub was in flight, commit the seek and hide
// the fast-forward HUD.
-(void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event{
    [super touchesCancelled:touches withEvent:event];
    if (self.hasMoved) {
        if (_controlType == WMControlTypeProgress) { // scrub in progress
            CGPoint tempPoint = [touches.anyObject locationInView:self];
            if (self.enableFastForwardGesture) {
                float value = [self moveProgressControllWithTempPoint:tempPoint];
                [self seekToTimeToPlay:value];
            }
            self.FF_View.hidden = YES;
        }else if (_controlType == WMControlTypeLight){// brightness: nothing to commit
        }
    }else{
    }
}
// Gesture finished normally: commit a pending scrub seek, hide the HUD.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event{
    self.FF_View.hidden = YES;
    [super touchesEnded:touches withEvent:event];
    if (self.hasMoved) {
        if (self.controlType == WMControlTypeProgress) { // scrub in progress
            if (self.enableFastForwardGesture) {
                CGPoint tempPoint = [touches.anyObject locationInView:self];
                float value = [self moveProgressControllWithTempPoint:tempPoint];
                [self seekToTimeToPlay:value];
                self.FF_View.hidden = YES;
            }
        }else if (_controlType == WMControlTypeLight){// brightness: nothing to commit
        }
    }else{
    }
}
#pragma mark -
/// Translate a horizontal pan position into a target playback time: a
/// full-screen-width swipe maps to TotalScreenTime seconds of movement
/// from the position where the gesture began. Clamped to [0, duration].
- (float)moveProgressControllWithTempPoint:(CGPoint)tempPoint {
    CGFloat screenWidth = [UIScreen mainScreen].bounds.size.width;
    float delta = TotalScreenTime * ((tempPoint.x - self.touchBeginPoint.x) / screenWidth);
    float target = self.touchBeginValue + delta;
    float upperBound = [self duration];
    if (target > upperBound) {
        target = upperBound;
    } else if (target < 0) {
        target = 0.0f;
    }
    return target;
}
#pragma mark - view
/// Refresh the fast-forward/rewind HUD while a horizontal scrub gesture
/// is in progress: the arrow image reflects the drag direction, and both
/// the HUD and the left time label show the prospective position.
- (void)timeValueChangingWithValue:(float)value {
    if (value > self.touchBeginValue) {
        self.FF_View.stateImageView.image = WMPlayerImage(@"progress_icon_r");
    } else if (value < self.touchBeginValue) {
        self.FF_View.stateImageView.image = WMPlayerImage(@"progress_icon_l");
    }
    self.FF_View.hidden = NO;
    NSString *currentText = [self convertTime:value];
    NSString *totalText = [self convertTime:self.totalTime];
    self.FF_View.timeLabel.text = [NSString stringWithFormat:@"%@/%@", currentText, totalText];
    self.leftTimeLabel.text = currentText;
}
/// Render a second count as "00:SS", "MM:SS", or "HH:MM:SS" depending on
/// magnitude. C-function counterpart of -convertTime: for plain callers.
NSString * calculateTimeWithTimeFormatter(long long timeSecond){
    if (timeSecond < 60) {
        return [NSString stringWithFormat:@"00:%.2lld", timeSecond];
    }
    if (timeSecond < 3600) {
        return [NSString stringWithFormat:@"%.2lld:%.2lld", timeSecond/60, timeSecond%60];
    }
    return [NSString stringWithFormat:@"%.2lld:%.2lld:%.2lld", timeSecond/3600, timeSecond%3600/60, timeSecond%60];
}
//
/// Tear the player down to a clean state so a new item can be loaded:
/// drop the item/model, stop observing notifications, pause, zero all
/// progress UI, reset the time labels, detach the layer and release the
/// AVPlayer.
///
/// Fixes: removed a duplicated `bottomProgress.progress = 0` and two dead
/// stores to leftTimeLabel that were unconditionally overwritten right
/// after the if/else (behavior unchanged).
- (void)resetWMPlayer{
    self.currentItem = nil;
    self.isInitPlayer = NO;
    _playerModel = nil;
    self.seekTime = 0;
    // Stop listening for item notifications (e.g. did-play-to-end).
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [self pause];
    // Zero every progress indicator.
    self.progressSlider.value = 0;
    self.bottomProgress.progress = 0;
    self.loadingProgress.progress = 0;
    // Reset the time labels.
    if (self.isFullscreen) {
        self.rightTimeLabel.text = @"";
    } else {
        self.rightTimeLabel.text = ([self convertTime:self.totalTime]);
    }
    // NOTE(review): preserved from the original — after reset the left label
    // shows the *total* time ("/<total>" in fullscreen); "00:00" may have
    // been the intent. Confirm before changing.
    self.leftTimeLabel.text = self.isFullscreen?([NSString stringWithFormat:@"/%@",[self convertTime:self.totalTime]]):([self convertTime:self.totalTime]);
    // Detach the rendering layer and release the AVPlayer.
    [self.playerLayer removeFromSuperlayer];
    [self.player replaceCurrentItemWithPlayerItem:nil];
    self.player = nil;
}
// Final teardown: stop pending seeks/loads, remove the periodic time
// observer and every KVO registration added for the current item, release
// the player pipeline, and re-enable the system idle timer.
-(void)dealloc{
    NSLog(@"WMPlayer dealloc");
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [self.player.currentItem cancelPendingSeeks];
    [self.player.currentItem.asset cancelLoading];
    [self.player pause];
    [self.player removeTimeObserver:self.playbackTimeObserver];
    // Remove every key path observed in the KVO handler; these must stay in
    // sync with the addObserver calls made when the item was installed.
    [_currentItem removeObserver:self forKeyPath:@"status"];
    [_currentItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
    [_currentItem removeObserver:self forKeyPath:@"playbackBufferEmpty"];
    [_currentItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"];
    [_currentItem removeObserver:self forKeyPath:@"duration"];
    [_currentItem removeObserver:self forKeyPath:@"presentationSize"];
    _currentItem = nil;
    [self.playerLayer removeFromSuperlayer];
    [self.player replaceCurrentItemWithPlayerItem:nil];
    self.player = nil;
    self.playOrPauseBtn = nil;
    self.playerLayer = nil;
    self.lightView = nil;
    // Let the screen sleep again now that playback is gone.
    [UIApplication sharedApplication].idleTimerDisabled=NO;
}
//
/// Map the current status-bar orientation to the rotation transform the
/// player view needs. Portrait (and any unrecognized orientation) maps to
/// the identity transform, matching the original fallthrough behavior.
+ (CGAffineTransform)getCurrentDeviceOrientation {
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
    switch (orientation) {
        case UIInterfaceOrientationLandscapeLeft:
            return CGAffineTransformMakeRotation(-M_PI_2);
        case UIInterfaceOrientationLandscapeRight:
            return CGAffineTransformMakeRotation(M_PI_2);
        default:
            return CGAffineTransformIdentity;
    }
}
//
/// Current WMPlayer release string.
+ (NSString *)version {
    return @"5.1.0";
}
@end
``` | /content/code_sandbox/WMPlayer/WMPlayer.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 13,141 |
```objective-c
//
// AppDelegate.m
// WeChat
//
// Created by zhengwenming on 16/6/4.
//
#import "AppDelegate.h"
#import "RootTabBarController.h"
@interface AppDelegate ()

@end

@implementation AppDelegate

/// App entry point: build the key window, install the tab-bar root
/// controller, and put the window on screen.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    UIWindow *window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds];
    window.backgroundColor = [UIColor whiteColor];
    window.rootViewController = [RootTabBarController new];
    self.window = window;
    [self.window makeKeyAndVisible];
    return YES;
}

// The remaining lifecycle hooks are intentionally empty (template defaults).

- (void)applicationWillResignActive:(UIApplication *)application {
}

- (void)applicationDidEnterBackground:(UIApplication *)application {
}

- (void)applicationWillEnterForeground:(UIApplication *)application {
}

- (void)applicationDidBecomeActive:(UIApplication *)application {
}

- (void)applicationWillTerminate:(UIApplication *)application {
}

@end
``` | /content/code_sandbox/WeChat/AppDelegate.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 417 |
```objective-c
//
// AddressBookCell.h
// IHKApp
//
// Created by on 15/4/23.
//
#import <UIKit/UIKit.h>
#import "FriendInfoModel.h"
/// Table cell for the contacts list: an avatar on the left with the
/// friend's display name beside it. Configure it by assigning frendModel.
@interface AddressBookCell : UITableViewCell
// Avatar image view (connected in the cell's nib).
@property (weak, nonatomic) IBOutlet UIImageView *photoIV;
// Display-name label (connected in the cell's nib).
@property (weak, nonatomic) IBOutlet UILabel *nameLabel;
// Model backing the cell; the setter presumably populates the outlets —
// TODO confirm in AddressBookCell.m.
@property(nonatomic,strong)FriendInfoModel *frendModel;
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/AddressBookCell.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 88 |
```objective-c
//
// WMSearchController.h
// WeChat
//
// Created by zhengwenming on 2018/4/3.
//
#import <UIKit/UIKit.h>
#import "SearchResultViewController.h"
/// UISearchController subclass used by the contacts screen.
@interface WMSearchController : UISearchController
// Whether the search bar offers voice input (effect implemented in the
// .m — TODO confirm).
@property (nonatomic, assign) BOOL enableVoiceInput;
/// Factory: builds a search controller whose matches are rendered by
/// `resultsController`, which must adopt WMSearchResultControllerProtocol.
+ (WMSearchController *)searchController:(BaseViewController<WMSearchResultControllerProtocol> *)resultsController;
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/WMSearchController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 93 |
```objective-c
//
// ContactsViewController.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "ContactsViewController.h"
#import "AddressBookCell.h"
#import "FriendInfoModel.h"
#import "WMSearchController.h"
#import "SearchResultViewController.h"
@interface ContactsViewController ()<UISearchBarDelegate,UISearchResultsUpdating,UITableViewDataSource,UITableViewDelegate>{
    // Full friend list parsed from the bundled AddressBook.json.
    NSMutableArray *dataSource;
    // Friends matching the current search text (filled in textDidChange:).
    NSMutableArray *updateArray;
}
/// Search controller whose results are shown by SearchResultViewController.
@property (nonatomic, strong) WMSearchController *searchController;
// Section index titles: UITableViewIndexSearch followed by sorted initials.
@property(nonatomic,strong) NSMutableArray *lettersArray;
// Fixed rows shown in section 0 (new friends / groups / tags / official).
@property(nonatomic,strong) NSMutableArray *topFixedArray;
// Maps an initial letter to the friends whose pinyin starts with it.
@property(nonatomic,strong) NSMutableDictionary *nameDic;
@property(nonatomic,strong) UITableView *friendTableView;
// Footer label showing the total number of contacts.
@property(nonatomic,strong)UILabel *footerLabel;
@property(nonatomic,strong)UIView *footer;
@end
@implementation ContactsViewController
// Lazily built contacts table. The cell nib is registered under the class
// name, which -tableView:cellForRowAtIndexPath: must use for dequeueing.
-(UITableView *)friendTableView{
    if (_friendTableView==nil) {
        _friendTableView = [UITableView new];
        _friendTableView.delegate = self;
        _friendTableView.dataSource = self;
        _friendTableView.rowHeight = 50.f;
        _friendTableView.backgroundColor = [UIColor groupTableViewBackgroundColor];
        [_friendTableView registerNib:[UINib nibWithNibName:NSStringFromClass([AddressBookCell class]) bundle:nil] forCellReuseIdentifier:NSStringFromClass([AddressBookCell class])];
        // Right-edge section index styling.
        _friendTableView.sectionIndexColor = [UIColor darkGrayColor];
        _friendTableView.sectionIndexBackgroundColor = [UIColor clearColor];
    }
    return _friendTableView;
}
// Lazily built label for the contact count; inset by a hairline so the
// footer's background shows through as a separator.
-(UILabel *)footerLabel{
    if (_footerLabel==nil) {
        _footerLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 1.0/(UIScreen.mainScreen.scale), self.view.frame.size.width, 50-1.0/(UIScreen.mainScreen.scale))];
        _footerLabel.textAlignment = NSTextAlignmentCenter;
        _footerLabel.textColor = [UIColor grayColor];
        _footerLabel.backgroundColor = [UIColor whiteColor];
        _footerLabel.font = [UIFont systemFontOfSize:17.f];
    }
    return _footerLabel;
}
// Lazily built table footer container hosting footerLabel.
-(UIView *)footer{
    if (_footer==nil) {
        _footer =[[UIView alloc] initWithFrame:CGRectMake(0, 0, self.view.frame.size.width, 50)];
        _footer.backgroundColor = self.friendTableView.separatorColor;
        [_footer addSubview:self.footerLabel];
    }
    return _footer;
}
// Lazily built search controller; selecting a result just dismisses the
// search UI. (@weakify/@strongify avoid a retain cycle through the block.)
- (WMSearchController *)searchController{
    if (_searchController == nil) {
        SearchResultViewController *searchReslutVC = [[SearchResultViewController alloc] init];
        @weakify(self);
        [searchReslutVC setItemSelectedAction:^(SearchResultViewController *searchVC, FriendInfoModel *userModel) {
            @strongify(self);
            [self.searchController setActive:NO];
        }];
        _searchController = [WMSearchController searchController:searchReslutVC];
        _searchController.searchBar.delegate = self;
        [_searchController setEnableVoiceInput:YES];
    }
    return _searchController;
}
/// Right bar-button action; adding friends is not implemented yet.
- (void)addFriends:(UIBarButtonItem *)sender {
    NSLog(@"addFriends");
}
// Build the backing collections, load the bundled contact data, and lay
// out the table with the search bar as its header and the count footer.
- (void)viewDidLoad {
    [super viewDidLoad];
    dataSource = [[NSMutableArray alloc]init];
    updateArray = [[NSMutableArray alloc]init];
    self.lettersArray = [[NSMutableArray alloc]init];
    self.nameDic = [[NSMutableDictionary alloc]init];
    [self loadAddressBookData];
    self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc] initWithImage:[UIImage imageNamed:@"contacts_add_friend"] style:UIBarButtonItemStylePlain target:self action:@selector(addFriends:)];
    [self.view addSubview:self.friendTableView];
    // Pin the table to all four edges (Masonry).
    [self.friendTableView mas_makeConstraints:^(MASConstraintMaker *make) {
        make.edges.mas_equalTo(0);
    }];
    self.friendTableView.tableHeaderView = self.searchController.searchBar;
    self.friendTableView.tableFooterView = self.footer;
}
/// Build the four fixed top rows, load the bundled AddressBook.json friend
/// list into dataSource, update the footer count, and derive the
/// letter-indexed lookup structures.
///
/// Fix: the original force-loaded the bundle resource; a missing
/// AddressBook.json would raise in fileURLWithPath: (nil path) and a
/// corrupt one would feed nil into the JSON parse. Each step is now
/// guarded, degrading to an empty friend list instead of crashing.
-(void)loadAddressBookData{
    self.topFixedArray = [NSMutableArray arrayWithCapacity:4];
    FriendInfoModel *friends_new = [FriendInfoModel new];
    friends_new.userName = @"";
    friends_new.imgName = @"friends_new";
    [self.topFixedArray addObject:friends_new];
    FriendInfoModel *friends_group = [FriendInfoModel new];
    friends_group.userName = @"";
    friends_group.imgName = @"friends_group";
    [self.topFixedArray addObject:friends_group];
    FriendInfoModel *friends_tag = [FriendInfoModel new];
    friends_tag.userName = @"";
    friends_tag.imgName = @"friends_tag";
    [self.topFixedArray addObject:friends_tag];
    FriendInfoModel *friends_public = [FriendInfoModel new];
    friends_public.userName = @"";
    friends_public.imgName = @"friends_public";
    [self.topFixedArray addObject:friends_public];
    // Guarded bundle load: any failure leaves JSONDic nil, and fast
    // enumeration over nil below simply performs zero iterations.
    NSString *jsonPath = [[NSBundle mainBundle] pathForResource:@"AddressBook" ofType:@"json"];
    NSData *friendsData = jsonPath ? [NSData dataWithContentsOfURL:[NSURL fileURLWithPath:jsonPath]] : nil;
    NSDictionary *JSONDic = friendsData ? [NSJSONSerialization JSONObjectWithData:friendsData options:NSJSONReadingAllowFragments error:nil] : nil;
    for (NSDictionary *eachDic in JSONDic[@"friends"][@"row"]) {
        [dataSource addObject:[[FriendInfoModel alloc]initWithDic:eachDic]];
    }
    self.footerLabel.text = [NSString stringWithFormat:@"%lu",(unsigned long)dataSource.count];
    [self handleLettersArray];
}
#pragma mark
#pragma mark tableView delegate
/// One fixed section (top entries) plus one section per index title.
- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
    return self.lettersArray.count + 1;
}

/// Section 0 shows the fixed entries; section N (N >= 1) shows the friends
/// filed under lettersArray[N-1]. The search placeholder letter has no
/// entry in nameDic, so its section reports zero rows (count of nil).
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
    if (section == 0) {
        return self.topFixedArray.count;
    }
    NSArray *names = [self.nameDic objectForKey:self.lettersArray[section - 1]];
    return names.count;
}
// Re-tint the section-header letters on every scroll tick.
- (void)scrollViewDidScroll:(UIScrollView *)scrollView {
    [self dealTipsColor];
}
// Walk the table's header views (identified by tag 100, set in
// -tableView:viewForHeaderInSection:) and highlight the letter of the
// header currently pinned near the navigation bar.
// NOTE(review): the thresholds (kNavbarHeight+1, 85, 90, 100) are tuned to
// specific layouts — confirm against current device metrics.
-(void)dealTipsColor{
    // Slight delay so the pass runs after the layout from this scroll tick.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        for (UIView *view in self.friendTableView.subviews) {
            if (view.tag==100) {
                UILabel *titleLabel = view.subviews.firstObject;
                CGRect frame = [self.friendTableView convertRect:view.frame toView:self.view];
                if (frame.origin.y<=kNavbarHeight+1&&frame.origin.y>85) {
                    titleLabel.textColor = kThemeColor;
                }else{
                    if (frame.origin.y<=100&&frame.origin.y>=90) {
                        titleLabel.textColor = kThemeColor;
                    }else{
                        titleLabel.textColor = [UIColor grayColor];
                    }
                }
            }
        }
    });
}
/// Dequeue and configure a contact cell: section 0 uses the fixed top
/// entries, any other section looks the friend up via its letter bucket.
///
/// Fix: the reuse identifier is now derived with NSStringFromClass — the
/// same expression used when registering the nib in the friendTableView
/// getter — instead of repeating the class name as a string literal.
-(UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    AddressBookCell *cell = (AddressBookCell *)[tableView dequeueReusableCellWithIdentifier:NSStringFromClass([AddressBookCell class])];
    FriendInfoModel *model;
    if (indexPath.section == 0) {
        model = self.topFixedArray[indexPath.row];
    } else {
        model = [[self.nameDic objectForKey:self.lettersArray[indexPath.section - 1]] objectAtIndex:indexPath.row];
    }
    cell.frendModel = model;
    return cell;
}
// Row selection: only clears the highlight for now; navigation to a
// friend's detail page is not implemented.
-(void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath{
    [tableView deselectRowAtIndexPath:indexPath animated:YES];
    //    FriendInfoModel *friends = [[self.nameDic objectForKey:[self.lettersArray objectAtIndex:indexPath.section]] objectAtIndex:indexPath.row];
}
// Placeholder hook for acting on a chosen contact; intentionally empty.
-(void)selectPersonWithUserId:(NSString *)userId userName:(NSString *)userName photo:(NSString *)photo phoneNO:(NSString *)phoneNO{
}
/// Suppress section footers entirely: an empty view with an effectively
/// zero height (CGFLOAT_MIN; returning 0 would fall back to the default).
- (UIView *)tableView:(UITableView *)tableView viewForFooterInSection:(NSInteger)section {
    return [[UIView alloc] init];
}

- (CGFloat)tableView:(UITableView *)tableView heightForFooterInSection:(NSInteger)section {
    return CGFLOAT_MIN;
}
// Letter-section headers: a gray bar (tag 100, consumed by -dealTipsColor)
// with the section's initial. The search-symbol placeholder section gets a
// header too, but with an empty label. Section 0 gets a bare empty view.
-(UIView *)tableView:(UITableView *)tableView viewForHeaderInSection:(NSInteger)section{
    if (section>=1) {
        UIView *headerView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, tableView.frame.size.width, 25)];
        headerView.backgroundColor = [UIColor groupTableViewBackgroundColor];
        // Tag 100 marks this as a letter header for the scroll tinting pass.
        headerView.tag =100;
        NSString *letterString = self.lettersArray[section-1];
        UILabel *letterLabel = [[UILabel alloc] initWithFrame:CGRectMake(15, headerView.frame.origin.y, headerView.frame.size.width-10, headerView.frame.size.height)];
        letterLabel.textColor = [UIColor grayColor];
        letterLabel.font = [UIFont systemFontOfSize:14.f];
        // The search placeholder section shows no letter.
        if (![letterString isEqualToString:UITableViewIndexSearch]) {
            letterLabel.text = letterString;
        }
        [headerView addSubview:letterLabel];
        [self dealTipsColor];
        return headerView;
    }
    return [UIView new];
}
// Header height: 25pt for letter sections, effectively zero otherwise.
// NOTE(review): -viewForHeaderInSection: builds a header for every
// section >= 1, but this collapses section 1 (the search placeholder).
// That looks deliberate — it hides the empty placeholder header — but
// confirm before "fixing" the apparent mismatch.
-(CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger)section{
    return section>1?25.f:CGFLOAT_MIN;
}
/// Plain section titles. Only consulted when no header *view* is supplied,
/// so with -viewForHeaderInSection: implemented this is effectively dormant;
/// kept correct for completeness.
///
/// Fix: the original returned lettersArray[section], which is off by one —
/// section N maps to lettersArray[N-1] everywhere else in this class — and
/// indexed out of bounds for the last section (there are count+1 sections).
- (NSString *)tableView:(UITableView *)tableView titleForHeaderInSection:(NSInteger)section{
    if (section == 0) {
        // The fixed top section has no letter title.
        return nil;
    }
    return self.lettersArray[section - 1];
}
/// Map a tapped index title to its table section.
///
/// Fix: the original returned `index` directly, but index title i (i >= 1)
/// lives in section i + 1 — section 0 holds the fixed rows and section 1 is
/// the empty UITableViewIndexSearch placeholder — so tapping any letter
/// after the first scrolled one section short.
- (NSInteger)tableView:(UITableView *)tableView sectionForSectionIndexTitle:(NSString *)title atIndex:(NSInteger)index{
    if (index == 0) {
        // The magnifier symbol scrolls back to the very top.
        return 0;
    }
    return index + 1;
}
/// Strings shown in the table's right-edge section index: the search
/// symbol followed by the sorted initial letters.
- (NSArray<NSString *> *)sectionIndexTitlesForTableView:(UITableView *)tableView {
    return self.lettersArray;
}
#pragma mark
#pragma mark UISearchBarDelegate
// UISearchBarDelegate: intentionally empty; editing start needs no
// handling beyond what -searchBarShouldBeginEditing: already does.
- (void)searchBarTextDidBeginEditing:(UISearchBar *)searchBar{
}
/// Filter the friend list as the search text changes. Queries containing
/// Chinese characters match against the raw userName; Latin queries match
/// (case-insensitively) against each name's full pinyin transcription.
/// Matches accumulate in updateArray.
///
/// Fixes: the pinyin formatter is now created once per search instead of
/// once per friend (it is never mutated inside the loop), and a redundant
/// second -lowercaseString on the already-lowercased pinyin was dropped.
- (void)searchBar:(UISearchBar *)searchBar textDidChange:(NSString *)searchText{
    if (searchText) {
        [updateArray removeAllObjects];
        if ([PinyinHelper isIncludeChineseInString:searchText]) {
            // Chinese input: plain substring match on the display name.
            for (FriendInfoModel *friends in dataSource) {
                if ([friends.userName rangeOfString:searchText].location != NSNotFound) {
                    [updateArray addObject:friends];
                }
            }
        } else {
            // Latin input: substring match on the lowercase pinyin form.
            HanyuPinyinOutputFormat *formatter = [[HanyuPinyinOutputFormat alloc] init];
            formatter.caseType = CaseTypeUppercase;
            formatter.vCharType = VCharTypeWithV;
            formatter.toneType = ToneTypeWithoutTone;
            NSString *query = [searchText lowercaseString];
            for (FriendInfoModel *friends in dataSource) {
                NSString *outputPinyin = [[PinyinHelper toHanyuPinyinStringWithNSString:friends.userName withHanyuPinyinOutputFormat:formatter withNSString:@""] lowercaseString];
                if ([outputPinyin rangeOfString:query].location != NSNotFound) {
                    [updateArray addObject:friends];
                }
            }
        }
    }
}
// iPhone X layout workaround: manually re-pin the search text field and
// cancel button to y = 12 when editing begins, since the default layout
// misplaces them on notched devices.
// NOTE(review): digs into UISearchBar's private view hierarchy and uses
// KVC for the private "cancelButton" ivar — fragile across iOS versions
// and an App Store review risk; confirm it is still needed.
- (BOOL)searchBarShouldBeginEditing:(UISearchBar *)searchBar{
    if (IS_IPHONEX) {
        for (UIView *view in searchBar.subviews[0].subviews) {
            if ([view isKindOfClass:NSClassFromString(@"UISearchBarTextField")]) {
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.15 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    [UIView animateWithDuration:0.15 animations:^{
                        view.frame = CGRectMake(view.frame.origin.x, 12, view.frame.size.width, view.frame.size.height);
                        UIButton *canceLBtn = [searchBar valueForKey:@"cancelButton"];
                        canceLBtn.frame = CGRectMake(canceLBtn.frame.origin.x, 12, canceLBtn.frame.size.width, canceLBtn.frame.size.height);
                    }];
                });
            }
        }
    }
    return YES;
}
// Same re-pinning when editing ends (slightly shorter animation).
-(BOOL)searchBarShouldEndEditing:(UISearchBar *)searchBar{
    if (IS_IPHONEX) {
        for (UIView *view in searchBar.subviews[0].subviews) {
            if ([view isKindOfClass:NSClassFromString(@"UISearchBarTextField")]) {
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.15 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    [UIView animateWithDuration:0.1 animations:^{
                        view.frame = CGRectMake(view.frame.origin.x, 12, view.frame.size.width, view.frame.size.height);
                        UIButton *canceLBtn = [searchBar valueForKey:@"cancelButton"];
                        canceLBtn.frame = CGRectMake(canceLBtn.frame.origin.x, 12, canceLBtn.frame.size.width, canceLBtn.frame.size.height);
                    }];
                });
            }
        }
    }
    return YES;
}
//letterArray
// Derive the index structures from dataSource:
//  1. collect the set of uppercase pinyin initials (tempDic keys);
//  2. bucket every friend under its initial in nameDic;
//  3. sort the initials and prepend UITableViewIndexSearch.
// NOTE(review): [outputPinyin substringToIndex:1] raises for an empty
// userName (empty pinyin) — confirm the data guarantees non-empty names.
// NOTE(review): a fresh HanyuPinyinOutputFormat is allocated per friend
// per letter (O(letters × friends)) — a candidate for hoisting.
- (void)handleLettersArray{
    NSMutableDictionary *tempDic = [[NSMutableDictionary alloc]init];
    for(FriendInfoModel *friends in dataSource){
        HanyuPinyinOutputFormat *formatter = [[HanyuPinyinOutputFormat alloc] init];
        formatter.caseType = CaseTypeLowercase;
        formatter.vCharType = VCharTypeWithV;
        formatter.toneType = ToneTypeWithoutTone;
        NSString *outputPinyin=[PinyinHelper toHanyuPinyinStringWithNSString:friends.userName withHanyuPinyinOutputFormat:formatter withNSString:@""];
        // Only the *keys* of tempDic matter (the set of initials); the
        // stored friend value is never read back.
        [tempDic setObject:friends forKey:[[outputPinyin substringToIndex:1] uppercaseString]];
    }
    self.lettersArray = tempDic.allKeys.mutableCopy;
    for (NSString *letter in self.lettersArray) {
        NSMutableArray *tempArry = [[NSMutableArray alloc] init];
        for (NSInteger i = 0; i<dataSource.count; i++) {
            FriendInfoModel *friends = dataSource[i];
            HanyuPinyinOutputFormat *formatter = [[HanyuPinyinOutputFormat alloc] init];
            formatter.caseType = CaseTypeUppercase;
            formatter.vCharType = VCharTypeWithV;
            formatter.toneType = ToneTypeWithoutTone;
            NSString *outputPinyin=[PinyinHelper toHanyuPinyinStringWithNSString:friends.userName withHanyuPinyinOutputFormat:formatter withNSString:@""];
            if ([letter isEqualToString:[[outputPinyin substringToIndex:1] uppercaseString]]) {
                [tempArry addObject:friends];
            }
        }
        [self.nameDic setObject:tempArry forKey:letter];
    }
    // Sort the initials by their first character.
    NSComparator cmptr = ^(id obj1, id obj2){
        if ([obj1 characterAtIndex:0] > [obj2 characterAtIndex:0]) {
            return (NSComparisonResult)NSOrderedDescending;
        }
        if ([obj1 characterAtIndex:0] < [obj2 characterAtIndex:0]) {
            return (NSComparisonResult)NSOrderedAscending;
        }
        return (NSComparisonResult)NSOrderedSame;
    };
    // NOTE(review): this stores a *string* under the empty-string key in a
    // dictionary whose other values are arrays; the key is never read back.
    [self.nameDic setObject:UITableViewIndexSearch forKey:@""];
    self.lettersArray = [[NSMutableArray alloc]initWithArray:[self.lettersArray sortedArrayUsingComparator:cmptr]];
    [self.lettersArray insertObject:UITableViewIndexSearch atIndex:0];
}
// No custom caches to purge here; kept as an explicit override point only.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/ContactsViewController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 3,100 |
```objective-c
//
// AppDelegate.h
// WeChat
//
// Created by zhengwenming on 16/6/4.
//
#import <UIKit/UIKit.h>
/// Application delegate; owns the app's key window.
@interface AppDelegate : UIResponder <UIApplicationDelegate>
/// The app's single window, installed at launch.
@property (strong, nonatomic) UIWindow *window;
@end
``` | /content/code_sandbox/WeChat/AppDelegate.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 55 |
```objective-c
//
// RootTabBarController.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import <UIKit/UIKit.h>
/// Root tab bar controller hosting the app's main tabs
/// (home / contacts / discover / me — see the implementation's -viewDidLoad).
@interface RootTabBarController : UITabBarController
@end
``` | /content/code_sandbox/WeChat/ViewController/TabBarController/RootTabBarController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 49 |
```objective-c
//
// main.m
// WeChat
//
// Created by zhengwenming on 16/6/4.
//
#import <UIKit/UIKit.h>
#import "AppDelegate.h"
/// Standard UIKit entry point: launches the app with AppDelegate as the
/// application delegate (no custom UIApplication subclass).
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
``` | /content/code_sandbox/WeChat/main.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 73 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="16096" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16086"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/>
<viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="568" translatesAutoresizingMaskIntoConstraints="NO" id="Ahc-Gn-dfv">
<rect key="frame" x="0.0" y="0.0" width="414" height="862"/>
</imageView>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="Ahc-Gn-dfv" firstAttribute="top" secondItem="Ze5-6b-2t3" secondAttribute="top" id="Dcy-bS-xuB"/>
<constraint firstItem="Ahc-Gn-dfv" firstAttribute="leading" secondItem="Ze5-6b-2t3" secondAttribute="leading" id="MuT-Vw-Fw9"/>
<constraint firstAttribute="trailing" secondItem="Ahc-Gn-dfv" secondAttribute="trailing" id="Z79-W2-Lm6"/>
<constraint firstItem="xb3-aO-Qok" firstAttribute="top" secondItem="Ahc-Gn-dfv" secondAttribute="bottom" id="pBV-jr-Ts1"/>
</constraints>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="301.875" y="327.46478873239437"/>
</scene>
</scenes>
<resources>
<image name="568" width="320" height="568"/>
</resources>
</document>
``` | /content/code_sandbox/WeChat/Base.lproj/LaunchScreen.storyboard | xml | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 745 |
```objective-c
/*!
@header SearchResultViewController.h
@abstract Githubpath_to_url
CSDN:path_to_url
@author Created by zhengwenming on 16/3/11
@version 1.00 16/3/11 Creation()
*/
#import <UIKit/UIKit.h>
#import "BaseViewController.h"
#import "FriendInfoModel.h"
/// Contract for a search-results controller: it reacts to UISearchController
/// updates/delegate callbacks and reports row taps through a block.
@protocol WMSearchResultControllerProtocol <UISearchResultsUpdating, UISearchBarDelegate,UISearchControllerDelegate>
/// Registers a callback fired when the user taps a result row.
- (void)setItemClickAction:(void (^)(__kindof UIViewController *searchResultVC, id data))itemClickAction;
@end
/// Results table listing friends that match the current search text
/// (raw-name match for Chinese queries, pinyin match otherwise).
@interface SearchResultViewController : BaseViewController<WMSearchResultControllerProtocol>
/// Invoked on row selection with the tapped friend's model.
@property (nonatomic, copy) void (^itemSelectedAction)(SearchResultViewController *searchResultVC, FriendInfoModel *userModel);
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/SearchResultViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 168 |
```objective-c
//
// WMSearchController.m
// WeChat
//
// Created by zhengwenming on 2018/4/3.
//
#import "WMSearchController.h"
@interface WMSearchController ()
@end
@implementation WMSearchController
/// Convenience factory: wraps `resultsController` in a WMSearchController and
/// wires it up as the search-results updater. Returns nil when no controller
/// is supplied.
+ (WMSearchController *)searchController:(UIViewController<WMSearchResultControllerProtocol> *)resultsController{
    if (resultsController == nil) {
        return nil;
    }
    WMSearchController *controller = [[WMSearchController alloc] initWithSearchResultsController:resultsController];
    controller.searchResultsUpdater = resultsController;
    return controller;
}
/// Initializer: configures the search bar appearance and hooks every
/// delegate/updating role up to the results controller.
- (instancetype)initWithSearchResultsController:(UIViewController<WMSearchResultControllerProtocol> *)searchResultsController{
    if (self = [super initWithSearchResultsController:searchResultsController]) {
        self.delegate = searchResultsController;
        self.definesPresentationContext = YES;
        searchResultsController.edgesForExtendedLayout = UIRectEdgeNone;
        self.searchBar.placeholder = @"";
        self.searchBar.delegate = searchResultsController;
        self.searchBar.translucent = NO;
        self.searchBar.tintColor = kThemeColor;
        // Clear the cancel button's title for all search bars app-wide.
        // +appearanceWhenContainedIn: was deprecated in iOS 9 — use the
        // class-array variant (this file already targets iOS 11+ APIs).
        [[UIBarButtonItem appearanceWhenContainedInInstancesOfClasses:@[[UISearchBar class]]] setTitle:@""];
    }
    return self;
}
// The lifecycle overrides below add no behavior; they exist as hook points.
- (void)viewWillAppear:(BOOL)animated{
    [super viewWillAppear:animated];
}
- (void)viewWillDisappear:(BOOL)animated{
    [super viewWillDisappear:animated];
}
-(void)viewDidLoad{
    [super viewDidLoad];
}
/// Shows or hides the voice-input button, reusing the search bar's bookmark
/// slot for the microphone artwork.
- (void)setEnableVoiceInput:(BOOL)showVoiceButton{
    _enableVoiceInput = showVoiceButton;
    [self.searchBar setShowsBookmarkButton:showVoiceButton];
    if (!showVoiceButton) {
        [self.searchBar setShowsBookmarkButton:NO];
        return;
    }
    [self.searchBar setImage:[UIImage imageNamed:@"VoiceSearchStartBtn"] forSearchBarIcon:UISearchBarIconBookmark state:UIControlStateNormal];
    [self.searchBar setImage:[UIImage imageNamed:@"VoiceSearchStartBtnHL"] forSearchBarIcon:UISearchBarIconBookmark state:UIControlStateHighlighted];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/WMSearchController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 483 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "path_to_url">
<plist version="1.0">
<dict>
<key>UIUserInterfaceStyle</key>
<string>Light</string>
<key>CFBundleAllowMixedLocalizations</key>
<true/>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
<key>NSCameraUsageDescription</key>
<string>WeChat</string>
<key>NSLocationWhenInUseUsageDescription</key>
<string>WeChat</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UIRequiresFullScreen</key>
<true/>
<key>UIStatusBarHidden</key>
<false/>
<key>UIStatusBarStyle</key>
<string>UIStatusBarStyleLightContent</string>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
</dict>
</plist>
``` | /content/code_sandbox/WeChat/Info.plist | xml | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 479 |
```objective-c
//
// AddressBookCell.m
// IHKApp
//
// Created by on 15/4/23.
//
#import "AddressBookCell.h"
@implementation AddressBookCell
/// One-time nib setup: round the avatar corners and disable the selection
/// highlight.
- (void)awakeFromNib {
    [super awakeFromNib];
    self.selectionStyle = UITableViewCellSelectionStyleNone;
    UIImageView *avatar = self.photoIV;
    avatar.clipsToBounds = YES;
    avatar.layer.cornerRadius = 3;
    avatar.backgroundColor = [UIColor lightGrayColor];
}
/// Binds a friend model to the cell: sets the name label and loads the avatar,
/// preferring a bundled image name over the remote photo URL.
-(void)setFrendModel:(FriendInfoModel *)frendModel{
    _frendModel = frendModel;
    self.nameLabel.text = frendModel.userName;
    if (frendModel.imgName) {
        self.photoIV.image = [UIImage imageNamed:frendModel.imgName];
    }else{
        // The previous call passed an empty completion block; the short
        // SDWebImage form is equivalent.
        [self.photoIV sd_setImageWithURL:[NSURL URLWithString:frendModel.photo] placeholderImage:[UIImage imageNamed:@"default_portrait"]];
    }
}
// Default selection handling; selectionStyle is None (set in -awakeFromNib),
// so no visible state change occurs.
- (void)setSelected:(BOOL)selected animated:(BOOL)animated {
    [super setSelected:selected animated:animated];
    // Configure the view for the selected state
}
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/AddressBookCell.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 252 |
```objective-c
/*!
@header SearchResultViewController.m
@abstract Githubpath_to_url
CSDN:path_to_url
@author Created by zhengwenming on 16/3/11
@version 1.00 16/3/11 Creation()
*/
#import "SearchResultViewController.h"
#import "AddressBookCell.h"
@interface SearchResultViewController ()<UITableViewDataSource,UITableViewDelegate>{
}
@property(nonatomic,strong)UITableView *resultTableView;
@property(nonatomic,strong)NSMutableArray *dataSourceA;
@property(nonatomic,strong)NSMutableArray *jasonArray;
@property(nonatomic,strong)UIView *headerView;
@property(nonatomic,strong)UILabel *footerLabel;
@end
@implementation SearchResultViewController
// Required by WMSearchResultControllerProtocol.
// NOTE(review): the block is never stored or invoked — row taps are reported
// through the itemSelectedAction property instead; confirm this stub is
// intentional.
- (void)setItemClickAction:(void (^)(__kindof UIViewController *searchResultVC, id data))itemClickAction{
    NSLog(@"setItemClickAction");
}
/// Lazily-built table header: a single left-padded caption label.
-(UIView *)headerView{
    if (_headerView != nil) {
        return _headerView;
    }
    _headerView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, kScreenWidth, 25)];
    CGRect captionFrame = CGRectMake(10, 0, _headerView.frame.size.width-10, _headerView.frame.size.height);
    UILabel *captionLabel = [[UILabel alloc] initWithFrame:captionFrame];
    captionLabel.font = [UIFont systemFontOfSize:14.f];
    captionLabel.text = @"";
    captionLabel.textColor = [UIColor darkGrayColor];
    [_headerView addSubview:captionLabel];
    return _headerView;
}
/// Lazily-built footer label (used for the results/no-results message).
-(UILabel *)footerLabel{
    if (_footerLabel == nil) {
        UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, kScreenWidth, 40)];
        label.textAlignment = NSTextAlignmentCenter;
        label.textColor = [UIColor lightGrayColor];
        label.font = [UIFont systemFontOfSize:14.f];
        _footerLabel = label;
    }
    return _footerLabel;
}
/// Lazily-built results table; cells are registered from the AddressBookCell
/// nib, keyed by the class name.
-(UITableView *)resultTableView{
    if (_resultTableView != nil) {
        return _resultTableView;
    }
    UITableView *table = [UITableView new];
    table.delegate = self;
    table.dataSource = self;
    table.rowHeight = 50;
    table.estimatedSectionHeaderHeight = 0;
    table.estimatedSectionFooterHeight = 0;
    table.showsVerticalScrollIndicator = NO;
    table.backgroundColor = [UIColor groupTableViewBackgroundColor];
    // Dismiss the keyboard as soon as the user drags the results list.
    table.keyboardDismissMode = UIScrollViewKeyboardDismissModeOnDrag;
    NSString *cellIdentifier = NSStringFromClass([AddressBookCell class]);
    [table registerNib:[UINib nibWithNibName:cellIdentifier bundle:nil] forCellReuseIdentifier:cellIdentifier];
    _resultTableView = table;
    return _resultTableView;
}
/// Backing store for the currently-matched friends (lazily created).
-(NSMutableArray *)dataSourceA{
    if (_dataSourceA == nil) {
        _dataSourceA = [[NSMutableArray alloc] init];
    }
    return _dataSourceA;
}
/// Sets up the results table and loads the bundled AddressBook.json fixture
/// into jasonArray as FriendInfoModel instances.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self.view addSubview:self.resultTableView];
    self.resultTableView.tableHeaderView = self.headerView;
    self.resultTableView.tableFooterView = self.footerLabel;
    [self.resultTableView mas_makeConstraints:^(MASConstraintMaker *make) {
        make.edges.mas_equalTo(0);
    }];
    self.jasonArray = [NSMutableArray array];
    // Guard the fixture load: a missing resource previously fed nil into
    // -fileURLWithPath: (which raises) and NSJSONSerialization.
    NSString *path = [[NSBundle mainBundle] pathForResource:@"AddressBook" ofType:@"json"];
    if (path.length == 0) {
        return;
    }
    NSData *friendsData = [NSData dataWithContentsOfURL:[NSURL fileURLWithPath:path]];
    if (friendsData == nil) {
        return;
    }
    NSDictionary *JSONDic = [NSJSONSerialization JSONObjectWithData:friendsData options:NSJSONReadingAllowFragments error:nil];
    for (NSDictionary *eachDic in JSONDic[@"friends"][@"row"]) {
        [self.jasonArray addObject:[[FriendInfoModel alloc]initWithDic:eachDic]];
    }
}
// One row per matched friend.
-(NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section{
    return self.dataSourceA.count;
}
// Dequeues an AddressBookCell and binds the matched friend model to it.
-(UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    AddressBookCell *cell=(AddressBookCell *)[tableView dequeueReusableCellWithIdentifier:NSStringFromClass([AddressBookCell class])];
    FriendInfoModel *friends = [self.dataSourceA objectAtIndex:indexPath.row];
    cell.frendModel = friends;
    return cell;
}
// Forwards the tapped friend to the owner via itemSelectedAction.
-(void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath{
    FriendInfoModel *friendModel = [self.dataSourceA objectAtIndex:indexPath.row];
    if (self.itemSelectedAction) {
        self.itemSelectedAction(self, friendModel);
    }
}
#pragma mark - UISearchResultsUpdating
/// UISearchResultsUpdating: re-filters jasonArray on each keystroke.
/// Chinese queries match the raw userName; Latin queries match the name's
/// pinyin transliteration, case-insensitively.
- (void)updateSearchResultsForSearchController:(UISearchController *)searchController {
    NSString *query = searchController.searchBar.text;
    [self.dataSourceA removeAllObjects];
    // searchBar.text is non-nil while editing, so test length — the old
    // `if (text)` check also ran the (pointless) filter for an empty query.
    if (query.length > 0) {
        if ([PinyinHelper isIncludeChineseInString:query]) {
            for (FriendInfoModel *friends in self.jasonArray) {
                if ([friends.userName rangeOfString:query].location != NSNotFound) {
                    [self.dataSourceA addObject:friends];
                }
            }
        } else {
            // The formatter is loop-invariant; build it once, not per friend.
            HanyuPinyinOutputFormat *formatter = [[HanyuPinyinOutputFormat alloc] init];
            formatter.caseType = CaseTypeUppercase;
            formatter.vCharType = VCharTypeWithV;
            formatter.toneType = ToneTypeWithoutTone;
            NSString *lowercaseQuery = [query lowercaseString];
            for (FriendInfoModel *friends in self.jasonArray) {
                NSString *outputPinyin = [[PinyinHelper toHanyuPinyinStringWithNSString:friends.userName withHanyuPinyinOutputFormat:formatter withNSString:@""] lowercaseString];
                if ([outputPinyin rangeOfString:lowercaseQuery].location != NSNotFound) {
                    [self.dataSourceA addObject:friends];
                }
            }
        }
    }
    if (self.dataSourceA.count == 0) {
        self.footerLabel.text = @"";
        self.resultTableView.tableHeaderView = nil;
    } else {
        self.footerLabel.text = @"";
        self.resultTableView.tableHeaderView = self.headerView;
    }
    [self.resultTableView reloadData];
}
// Collapse the table's default section chrome: empty views with ~zero height
// for both the header and the footer of every section.
-(UIView *)tableView:(UITableView *)tableView viewForFooterInSection:(NSInteger)section{
    return [UIView new];
}
-(CGFloat)tableView:(UITableView *)tableView heightForFooterInSection:(NSInteger)section{
    return CGFLOAT_MIN;
}
-(UIView *)tableView:(UITableView *)tableView viewForHeaderInSection:(NSInteger)section{
    return [UIView new];
}
-(CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger)section{
    return CGFLOAT_MIN;
}
// The bookmark slot doubles as the voice-search button (its icons are set in
// WMSearchController); currently this handler only logs.
- (void)searchBarBookmarkButtonClicked:(UISearchBar *)searchBar
{
    NSLog(@"");
}
// No caches to release; default handling only.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/SearchResultViewController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,410 |
```unknown
//
// WeChat.pch
// WeChat
//
// Created by zhengwenming on 16/6/4.
//
#ifndef WeChat_pch
#define WeChat_pch
// ---- Layout & theme --------------------------------------------------------
#define kGAP 10
// App-wide WeChat-green tint (RGB 0/190/12).
#define kThemeColor [UIColor colorWithRed:0 green:(190 / 255.0) blue:(12 / 255.0) alpha:1]
#define kAvatar_Size 40
// ---- Device / screen classification ----------------------------------------
#define IS_IPAD (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad)
#define IS_IPHONE (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPhone)
#define kSCREEN_MAX_LENGTH (MAX(kScreenWidth, kScreenHeight))
#define kSCREEN_MIN_LENGTH (MIN(kScreenWidth, kScreenHeight))
// Model checks keyed off the portrait screen height in points.
#define IS_IPHONE4 (IS_IPHONE && kSCREEN_MAX_LENGTH < 568.0)
#define IS_IPHONE5 (IS_IPHONE && kSCREEN_MAX_LENGTH == 568.0)
#define IS_IPHONE6 (IS_IPHONE && kSCREEN_MAX_LENGTH == 667.0)
#define IS_IPHONE6P (IS_IPHONE && kSCREEN_MAX_LENGTH == 736.0)
// Notch detection: a non-zero bottom safe-area inset (iOS 11+; NO elsewhere).
#define IS_IPHONEX \
({BOOL isPhoneX = NO;\
if (@available(iOS 11.0, *)) {\
isPhoneX = [[UIApplication sharedApplication] delegate].window.safeAreaInsets.bottom > 0.0;\
}\
(isPhoneX);})
// ---- Screen & bar metrics --------------------------------------------------
#define kScreenWidth [UIScreen mainScreen].bounds.size.width
#define kScreenHeight [UIScreen mainScreen].bounds.size.height
#define kAppDelegate ((AppDelegate *)[UIApplication sharedApplication].delegate)
#define kDeviceVersion [[UIDevice currentDevice].systemVersion floatValue]
#define kStatusBarHeight [[UIApplication sharedApplication] statusBarFrame].size.height
#define kSystemNavHeight 44.0
#define kNavbarHeight (kStatusBarHeight+kSystemNavHeight)
// Tab bar / safe-area heights account for the notch-device home indicator.
#define kTabBarHeight (IS_IPHONEX?(49.f+34.f):(49.f))
#define kBottomSafeHeight ((IS_IPHONEX)?(34):(0))
// ---- Categories ------------------------------------------------------------
#import "UIImageView+WebCache.h"
#import "NSString+Extension.h"
#import "UIViewController+WMExtension.h"
#import "UIBarButtonItem+addition.h"
#import "UIView+WMFrame.h"
// ---- Third-party / shared components ---------------------------------------
#import "Masonry.h"
#import "YYKit.h"
#import "PinYin4Objc.h"
#import "CopyAbleLabel.h"
#import "YYFPSLabel.h"
#endif /* WeChat_pch */
``` | /content/code_sandbox/WeChat/WeChat.pch | unknown | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 505 |
```objective-c
//
// ContactsViewController.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "BaseViewController.h"
/// "Contacts" tab: an indexed, pinyin-sorted friend list with search.
@interface ContactsViewController : BaseViewController
@end
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/ContactsViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 46 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="15705" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES">
<device id="retina4_7" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="15706"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<tableViewCell contentMode="scaleToFill" selectionStyle="default" indentationWidth="10" rowHeight="44" id="KGk-i7-Jjw" customClass="AddressBookCell">
<rect key="frame" x="0.0" y="0.0" width="320" height="44"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<tableViewCellContentView key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" tableViewCell="KGk-i7-Jjw" id="H2p-sc-9uM">
<rect key="frame" x="0.0" y="0.0" width="320" height="44"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="Ktj-3W-bub">
<rect key="frame" x="15" y="8" width="28" height="28"/>
<constraints>
<constraint firstAttribute="width" secondItem="Ktj-3W-bub" secondAttribute="height" multiplier="1:1" id="if8-hO-Sgb"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalCompressionResistancePriority="749" text="Mrs" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="sUk-GU-UJm">
<rect key="frame" x="52" y="13" width="258" height="18"/>
<fontDescription key="fontDescription" type="system" pointSize="15"/>
<color key="textColor" red="0.33333333333333331" green="0.33333333333333331" blue="0.33333333333333331" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<constraints>
<constraint firstAttribute="trailing" secondItem="sUk-GU-UJm" secondAttribute="trailing" constant="10" id="CR8-5y-JOC"/>
<constraint firstAttribute="bottom" secondItem="Ktj-3W-bub" secondAttribute="bottom" constant="8" id="TNp-AT-WM6"/>
<constraint firstItem="Ktj-3W-bub" firstAttribute="leading" secondItem="H2p-sc-9uM" secondAttribute="leading" constant="15" id="Y4w-EC-cue"/>
<constraint firstItem="Ktj-3W-bub" firstAttribute="top" secondItem="H2p-sc-9uM" secondAttribute="top" constant="8" id="bx6-6P-uLy"/>
<constraint firstItem="sUk-GU-UJm" firstAttribute="centerY" secondItem="H2p-sc-9uM" secondAttribute="centerY" id="eG1-Ds-5AT"/>
<constraint firstItem="sUk-GU-UJm" firstAttribute="leading" secondItem="Ktj-3W-bub" secondAttribute="trailing" constant="9" id="fvR-1K-cIq"/>
</constraints>
</tableViewCellContentView>
<connections>
<outlet property="nameLabel" destination="sUk-GU-UJm" id="nBI-mW-7lf"/>
<outlet property="photoIV" destination="Ktj-3W-bub" id="zDX-az-mB0"/>
</connections>
<point key="canvasLocation" x="136" y="355"/>
</tableViewCell>
</objects>
</document>
``` | /content/code_sandbox/WeChat/ViewController/AddressBook通讯录/AddressBookCell.xib | xml | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,089 |
```objective-c
//
// BaseViewController.m
// WeChat
//
// Created by zhengwenming on 16/6/4.
//
#import "BaseViewController.h"
@interface BaseViewController ()
@end
@implementation BaseViewController
/// Shared base configuration for every screen: white background, dark status
/// bar text, and disabled table-view height estimation on iOS 11+.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor whiteColor];
    // UIStatusBarStyleDarkContent only exists from iOS 13; on earlier systems
    // UIStatusBarStyleDefault already renders dark content, so guard it.
    if (@available(iOS 13.0, *)) {
        [UIApplication sharedApplication].statusBarStyle = UIStatusBarStyleDarkContent;
    } else {
        [UIApplication sharedApplication].statusBarStyle = UIStatusBarStyleDefault;
    }
    if (@available(iOS 11.0, *)) {
        // Disable self-sizing estimation so contentSize math stays predictable.
        UITableView.appearance.estimatedRowHeight = 0;
        UITableView.appearance.estimatedSectionFooterHeight = 0;
        UITableView.appearance.estimatedSectionHeaderHeight = 0;
    }
}
/// Returns a uniformly distributed random integer in [from, to], inclusive.
/// Uses arc4random_uniform to avoid the modulo bias of `arc4random() % n`,
/// and tolerates swapped bounds (the old code divided by zero / a negative
/// span when to < from).
-(int)getRandomNumber:(int)from to:(int)to{
    if (to < from) {
        int swap = from;
        from = to;
        to = swap;
    }
    return from + (int)arc4random_uniform((uint32_t)(to - from + 1));
}
// Tapping anywhere outside a text field dismisses the keyboard.
-(void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event{
    [super touchesBegan:touches withEvent:event];
    [self.view endEditing:YES];
}
// No caches to release; default handling only.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/BaseViewController/BaseViewController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 281 |
```objective-c
//
// RootTabBarController.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "RootTabBarController.h"
#import "BaseNavigationController.h"
#define kSelImgKey @"selectedImageName"
@interface RootTabBarController ()
@end
@implementation RootTabBarController
/// Builds the four root tabs from a declarative spec: each entry names the
/// root view-controller class, tab title, and normal/selected icon images.
- (void)viewDidLoad {
    [super viewDidLoad];
    [[UITabBar appearance] setTranslucent:NO];
    // kSelImgKey (= @"selectedImageName") was defined at the top of this file
    // but previously unused — use it consistently for the selected-icon key.
    NSArray *childItemsArray = @[
                                 @{@"rootVCClassString" : @"HomeViewController",
                                   @"title" : @"",
                                   @"imageName" : @"tabbar_mainframe",
                                   kSelImgKey : @"tabbar_mainframeHL"},
                                 @{@"rootVCClassString" : @"ContactsViewController",
                                   @"title" : @"",
                                   @"imageName" : @"tabbar_contacts",
                                   kSelImgKey : @"tabbar_contactsHL"},
                                 @{@"rootVCClassString" : @"DiscoverViewController",
                                   @"title" : @"",
                                   @"imageName" : @"tabbar_discover",
                                   kSelImgKey : @"tabbar_discoverHL"},
                                 @{@"rootVCClassString" : @"MeViewController",
                                   @"title" : @"",
                                   @"imageName" : @"tabbar_me",
                                   kSelImgKey : @"tabbar_meHL"} ];
    [childItemsArray enumerateObjectsUsingBlock:^(NSDictionary *dict, NSUInteger idx, BOOL *stop) {
        UIViewController *vc = [NSClassFromString(dict[@"rootVCClassString"]) new];
        vc.title = dict[@"title"];
        BaseNavigationController *nav = [[BaseNavigationController alloc] initWithRootViewController:vc];
        UITabBarItem *item = nav.tabBarItem;
        item.title = dict[@"title"];
        item.image = [UIImage imageNamed:dict[@"imageName"]];
        // Render the selected icon as-is instead of tinting it.
        item.selectedImage = [[UIImage imageNamed:dict[kSelImgKey]] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal];
        [item setTitleTextAttributes:@{NSForegroundColorAttributeName : kThemeColor} forState:UIControlStateSelected];
        [self addChildViewController:nav];
    }];
    self.tabBar.tintColor = kThemeColor;
}
// No caches to release; default handling only.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/TabBarController/RootTabBarController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 483 |
```objective-c
//
// BaseViewController.h
// WeChat
//
// Created by zhengwenming on 16/6/4.
//
#import <UIKit/UIKit.h>
/// Common base class for all screens (shared appearance setup lives in the
/// implementation's -viewDidLoad).
/// NOTE(review): BaseNavigationController reads a `popBlock` property on this
/// class that is not declared here — presumably added via a category or class
/// extension; verify.
@interface BaseViewController : UIViewController
/// Returns a random integer in [from, to], inclusive.
-(int)getRandomNumber:(int)from to:(int)to;
@end
``` | /content/code_sandbox/WeChat/ViewController/BaseViewController/BaseViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 60 |
```objective-c
//
// BaseNavigationController.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import <UIKit/UIKit.h>
#import "BaseViewController.h"
/// Navigation controller used throughout the app: full-screen pop gesture,
/// unified bar appearance, and a custom back button on pushed controllers.
@interface BaseNavigationController : UINavigationController
@end
``` | /content/code_sandbox/WeChat/ViewController/BaseViewController/BaseNavigationController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 53 |
```objective-c
//
// BaseNavigationController.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "BaseNavigationController.h"
@interface BaseNavigationController ()<UINavigationControllerDelegate,UIGestureRecognizerDelegate>
@property (strong, nonatomic) UIPanGestureRecognizer *panGesture;
@end
@implementation BaseNavigationController
// Gate for the full-screen pop pan gesture; never begins on the root
// view controller.
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer {
    // Only rightward drags (the pop direction) may begin.
    CGPoint translation = [self.panGesture translationInView:gestureRecognizer.view];
    if (translation.x <= 0) {
        return NO;
    }
    if (self.childViewControllers.count > 1) {
        // isHideBackItem / fullScreenGestureShouldBegin are not declared in the
        // visible headers — presumably from the UIViewController+WMExtension
        // category imported in the pch; confirm. Controllers hiding the back
        // button also opt out of the gesture.
        if (self.visibleViewController.isHideBackItem) {
            return NO;
        }else {
            if ([self.visibleViewController respondsToSelector:@selector(fullScreenGestureShouldBegin)]) {
                return [self.visibleViewController fullScreenGestureShouldBegin];
            }
        }
    }
    return self.childViewControllers.count == 1 ? NO : YES;
}
// Unifies the navigation bar appearance and installs a full-screen pop pan
// gesture that forwards to the handler behind the system edge-pop gesture.
- (void)viewDidLoad {
    [super viewDidLoad];
    [UIApplication sharedApplication].statusBarStyle = UIStatusBarStyleDefault;
    UINavigationBar *bar = [UINavigationBar appearance];
    bar.barTintColor = [UIColor whiteColor];
    bar.tintColor = kThemeColor;
    bar.titleTextAttributes = @{NSForegroundColorAttributeName : [UIColor blackColor]};
    // NOTE(review): "handleNavigationTransition:" is a private UIKit selector
    // sent to the system gesture's delegate — fragile across iOS releases;
    // verify on each SDK update.
    UIGestureRecognizer *systemGes = self.interactivePopGestureRecognizer;
    id target = systemGes.delegate;
    self.panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:target action:NSSelectorFromString(@"handleNavigationTransition:")];
    [self.interactivePopGestureRecognizer.view addGestureRecognizer:self.panGesture];
    self.panGesture.delegate = self;
    // Disable the stock edge gesture so only the full-screen pan is active.
    systemGes.enabled = NO;
}
// Hides the tab bar on push and installs either a hidden back button or a
// custom back item whose icon the pushed controller supplies.
-(void)pushViewController:(UIViewController *)viewController animated:(BOOL)animated{
    if (self.viewControllers.count) {
        viewController.hidesBottomBarWhenPushed = YES;
        // isHideBackItem / backIconName presumably come from the
        // UIViewController+WMExtension category imported in the pch — confirm.
        if (viewController.isHideBackItem) {
            viewController.navigationItem.hidesBackButton = YES;
        }else{
            viewController.navigationItem.leftBarButtonItem = [UIBarButtonItem itemWithIcon:[viewController backIconName] highIcon:nil target:self action:@selector(back:)];
        }
    }
    [super pushViewController:viewController animated:animated];
}
// Custom back-button action: a BaseViewController subclass may intercept the
// pop through its popBlock; otherwise pop normally.
-(void)back:(UIBarButtonItem *)sender{
    [self.view endEditing:YES];
    UIViewController *visible = self.visibleViewController;
    if ([visible isKindOfClass:[BaseViewController class]]) {
        BaseViewController *baseVC = (BaseViewController *)visible;
        if (baseVC.popBlock) {
            baseVC.popBlock(sender);
            return;
        }
    }
    [self popViewControllerAnimated:YES];
}
// No caches to release; default handling only.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/BaseViewController/BaseNavigationController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 562 |
```objective-c
//
// ConversationListCell.m
// WeChat
//
// Created by apple on 2020/4/8.
//
#import "ConversationListCell.h"
@interface ConversationListCell ()
@property(nonatomic,strong)UIImageView *avatarIV;
@property(nonatomic,strong)UILabel *nameLabel;
@property(nonatomic,strong)UILabel *lastMsgLabel;
@property(nonatomic,strong)UILabel *lastMsgTimeLabel;
@property(nonatomic,strong)UILabel *tipLabel;
@property(nonatomic,strong)UILabel *unReadBadge;
@property(nonatomic,strong)UIView *line;
@end
@implementation ConversationListCell
/// Programmatic cell layout: square avatar on the left, name + last message
/// stacked beside it, timestamp pinned to the trailing edge, and a hairline
/// separator along the bottom.
- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier{
    if (self=[super initWithStyle:style reuseIdentifier:reuseIdentifier]) {
        // Avatar: rounded corners; width pinned equal to height (square).
        self.avatarIV = [[UIImageView alloc] init];
        self.avatarIV.layer.cornerRadius = 3;
        self.avatarIV.clipsToBounds = YES;
        [self.contentView addSubview:self.avatarIV];
        [self.avatarIV mas_makeConstraints:^(MASConstraintMaker *make) {
            make.leading.mas_equalTo(15);
            make.top.mas_equalTo(12);
            make.bottom.mas_equalTo(-12);
        }];
        [self.avatarIV mas_updateConstraints:^(MASConstraintMaker *make) {
            make.width.mas_equalTo(self.avatarIV.mas_height);
        }];
        // Timestamp, top-right. RGB components divide by 255 — the previous
        // 256 divisor was an off-by-one in the standard 0-255 color mapping.
        self.lastMsgTimeLabel = [[UILabel alloc] init];
        self.lastMsgTimeLabel.textColor = [UIColor colorWithRed:160.f/255.f green:160.f/255.f blue:160.f/255.f alpha:1.0];
        self.lastMsgTimeLabel.font = [UIFont systemFontOfSize:12.0f];
        [self.contentView addSubview:self.lastMsgTimeLabel];
        [self.lastMsgTimeLabel mas_makeConstraints:^(MASConstraintMaker *make) {
            make.top.mas_equalTo(self.contentView).offset(12);
            make.right.mas_equalTo(self.contentView).offset(-10);
        }];
        [self.lastMsgTimeLabel setContentCompressionResistancePriority:300 forAxis:UILayoutConstraintAxisHorizontal];
        // Name label; lowest compression resistance, so it truncates before
        // the timestamp when space runs out.
        self.nameLabel = [[UILabel alloc] init];
        self.nameLabel.textColor = [UIColor blackColor];
        self.nameLabel.font = [UIFont systemFontOfSize:17.0f];
        [self.contentView addSubview:self.nameLabel];
        [self.nameLabel mas_makeConstraints:^(MASConstraintMaker *make) {
            make.left.mas_equalTo(self.avatarIV.mas_right).offset(10);
            make.top.mas_equalTo(self.avatarIV).offset(1.5);
            make.right.mas_lessThanOrEqualTo(self.lastMsgTimeLabel.mas_left).mas_offset(-5);
        }];
        [self.nameLabel setContentCompressionResistancePriority:100 forAxis:UILayoutConstraintAxisHorizontal];
        // Last-message preview below the name.
        self.lastMsgLabel = [[UILabel alloc] init];
        self.lastMsgLabel.textColor = [UIColor lightGrayColor];
        self.lastMsgLabel.font = [UIFont systemFontOfSize:13.0f];
        [self.contentView addSubview:self.lastMsgLabel];
        [self.lastMsgLabel mas_makeConstraints:^(MASConstraintMaker *make) {
            make.top.mas_equalTo(self.nameLabel.mas_bottom).mas_offset(4);
            make.left.mas_equalTo(self.nameLabel);
            make.right.mas_lessThanOrEqualTo(self.contentView);
        }];
        [self.lastMsgLabel setContentCompressionResistancePriority:110 forAxis:UILayoutConstraintAxisHorizontal];
        // Hairline separator (one physical pixel) along the cell bottom.
        self.line = [UIView new];
        self.line.backgroundColor = [UIColor lightGrayColor];
        self.line.alpha = 0.65;
        [self.contentView addSubview:self.line];
        [self.line mas_makeConstraints:^(MASConstraintMaker *make) {
            make.top.mas_equalTo(self.contentView.mas_bottom).mas_offset(-1);
            make.left.mas_equalTo(self.lastMsgLabel);
            make.height.mas_equalTo(1.0/(UIScreen.mainScreen.scale));
            make.right.mas_equalTo(self.contentView);
        }];
    }
    return self;
}
// Pushes the model's fields into the subviews. Avatar loads asynchronously via
// SDWebImage with a local placeholder.
-(void)setConversationModel:(ConversationModel *)conversationModel{
    _conversationModel = conversationModel;
    [self.avatarIV sd_setImageWithURL:[NSURL URLWithString:conversationModel.avatarURL] placeholderImage:[UIImage imageNamed:@"placeholder"]];
    self.nameLabel.text = conversationModel.userName;
    self.lastMsgLabel.text = conversationModel.text;
    self.lastMsgTimeLabel.text = conversationModel.time;
}
// Nib path is unused (cell is built in initWithStyle:), kept as boilerplate.
- (void)awakeFromNib {
    [super awakeFromNib];
    // Initialization code
}
// No custom selected appearance.
- (void)setSelected:(BOOL)selected animated:(BOOL)animated {
    [super setSelected:selected animated:animated];
    // Configure the view for the selected state
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Home微信/ConversationListCell.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 889 |
```objective-c
//
// HomeViewController.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "HomeViewController.h"
#import "ConversationModel.h"
#import "ConversationListCell.h"
#import "FriendInfoModel.h"
#import "ConversationViewController.h"
@interface HomeViewController ()<UITableViewDelegate,UITableViewDataSource>
{
}
@property(nonatomic,strong)UITableView *homeTableView;
@property(nonatomic,strong)NSMutableArray *dataSource;
@end
@implementation HomeViewController
/// Lazily-created backing store for the conversation list.
-(NSMutableArray *)dataSource{
    if (!_dataSource) {
        _dataSource = [[NSMutableArray alloc] init];
    }
    return _dataSource;
}
// Loads the bundled AddressBook.json and maps each friend entry into a
// ConversationModel row, then builds the conversation table.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Guard the bundle lookup: -fileURLWithPath: throws on nil, and the read or
    // the JSON parse can fail — an empty list is better than a crash.
    NSString *path = [[NSBundle mainBundle] pathForResource:@"AddressBook" ofType:@"json"];
    NSData *friendsData = path ? [NSData dataWithContentsOfURL:[NSURL fileURLWithPath:path]] : nil;
    NSDictionary *JSONDic = nil;
    if (friendsData) {
        JSONDic = [NSJSONSerialization JSONObjectWithData:friendsData options:NSJSONReadingAllowFragments error:nil];
    }
    // Fast-enumerating a nil collection is a no-op, so no extra guard is needed.
    for (NSDictionary *eachDic in JSONDic[@"friends"][@"row"]) {
        FriendInfoModel *fModel = [[FriendInfoModel alloc]initWithDic:eachDic];
        ConversationModel *conversation = [ConversationModel new];
        conversation.avatarURL = fModel.photo;
        conversation.userName = fModel.userName;
        conversation.userId = fModel.userId;
        // Fix: the original assigned @"" and immediately overwrote it.
        conversation.text = [NSString stringWithFormat:@"%@",fModel.userName];
        conversation.time = @"";
        [self.dataSource addObject:conversation];
    }
    // Table fills the area between navigation bar and tab bar.
    self.homeTableView = [[UITableView alloc]initWithFrame:CGRectMake(0, kNavbarHeight, self.view.frame.size.width, kScreenHeight-kNavbarHeight-kTabBarHeight) style:UITableViewStyleGrouped];
    self.homeTableView.dataSource = self;
    self.homeTableView.rowHeight = 66.f;
    self.homeTableView.delegate = self;
    self.homeTableView.separatorStyle = UITableViewCellSeparatorStyleNone;
    [self.homeTableView registerClass:NSClassFromString(@"ConversationListCell") forCellReuseIdentifier:@"ConversationListCell"];
    // Dismiss any keyboard as soon as the user drags the list.
    self.homeTableView.keyboardDismissMode = UIScrollViewKeyboardDismissModeOnDrag;
    [self.view addSubview:self.homeTableView];
}
// One row per conversation.
-(NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section{
    return self.dataSource.count;
}
// Dequeues a registered ConversationListCell and hands it the model; the cell
// configures its own subviews in -setConversationModel:.
-(UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    ConversationListCell *cell = [tableView dequeueReusableCellWithIdentifier:@"ConversationListCell" forIndexPath:indexPath];
    ConversationModel *conversation = self.dataSource[indexPath.row];
    cell.conversationModel = conversation;
    return cell;
}
// Collapse the grouped-style section header/footer chrome: empty views with
// CGFLOAT_MIN height (0 would make UIKit fall back to the default height).
-(UIView *)tableView:(UITableView *)tableView viewForFooterInSection:(NSInteger)section{
    return [UIView new];
}
-(CGFloat)tableView:(UITableView *)tableView heightForFooterInSection:(NSInteger)section{
    return CGFLOAT_MIN;
}
-(UIView *)tableView:(UITableView *)tableView viewForHeaderInSection:(NSInteger)section{
    return [UIView new];
}
-(CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger)section{
    return CGFLOAT_MIN;
}
// Open the chat screen for the tapped conversation. A fresh
// ConversationViewController is pushed each time; only the title is passed in.
-(void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath{
    [self.view endEditing:YES];
    [tableView deselectRowAtIndexPath:indexPath animated:YES];
    ConversationModel *conversationModel = self.dataSource[indexPath.row];
    ConversationViewController *conversationVC = [ConversationViewController new];
    conversationVC.navigationItem.title = conversationModel.userName;
    [self.navigationController pushViewController:conversationVC animated:YES];
}
// All rows support swipe-to-edit.
-(BOOL)tableView:(UITableView *)tableView canEditRowAtIndexPath:(NSIndexPath *)indexPath{
    return YES;
}
// System-delete path: keep model and UI in sync (model first, then the row
// animation). NOTE(review): -editActionsForRowAtIndexPath: below supersedes
// this when implemented; both delete paths do the same thing.
- (void)tableView:(UITableView *)tableView commitEditingStyle:(UITableViewCellEditingStyle)editingStyle forRowAtIndexPath:(NSIndexPath *)indexPath {
    if (editingStyle == UITableViewCellEditingStyleDelete) {
        // Delete the row from the data source
        //
        [self.dataSource removeObjectAtIndex:indexPath.row];
        //UI
        [tableView deleteRowsAtIndexPaths:[NSArray arrayWithObject:indexPath] withRowAnimation:UITableViewRowAnimationFade];
    }else if (editingStyle == UITableViewCellEditingStyleInsert) {
    }
}
#pragma mark - # Delegate
//
// Custom swipe actions: delete + a (stubbed) "mark unread / move to top" action.
- (NSArray *)tableView:(UITableView *)tableView editActionsForRowAtIndexPath:(NSIndexPath *)indexPath{
    // Delete: remove from the model first, then animate the row out. The
    // handler's indexPath parameter shadows the outer one, which is what we want.
    UITableViewRowAction *deleteAction = [UITableViewRowAction rowActionWithStyle:(UITableViewRowActionStyleDestructive) title:@"" handler:^(UITableViewRowAction *action, NSIndexPath *indexPath) {
        NSLog(@"");
        //1.
        [self.dataSource removeObjectAtIndex:indexPath.row];
        //2.UI
        [tableView deleteRowsAtIndexPaths:@[indexPath] withRowAnimation:(UITableViewRowAnimationAutomatic)];
    }];
    //
    deleteAction.backgroundColor = [UIColor redColor];
    // Second action is a stub: the move-to-top logic is commented out.
    // NOTE(review): it is declared Destructive, so UIKit expects the row to be
    // removed when it fires — consider Normal style once the body is implemented.
    UITableViewRowAction *unReadRowAction =[UITableViewRowAction rowActionWithStyle:(UITableViewRowActionStyleDestructive) title:@"" handler:^(UITableViewRowAction *action, NSIndexPath *indexPath) {
        NSLog(@"");
        //1.
        // [self.dataSource exchangeObjectAtIndex:indexPath.row withObjectAtIndex:0];
        //2.UI
        // NSIndexPath *firstIndexPath =[NSIndexPath indexPathForRow:0 inSection:indexPath.section];
        // [tableView moveRowAtIndexPath:indexPath toIndexPath:firstIndexPath];
    }];
    unReadRowAction.backgroundColor = [UIColor blackColor];
    return @[deleteAction,unReadRowAction];
}
// Nothing extra to release on memory pressure.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Home微信/HomeViewController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,040 |
```objective-c
//
// HomeViewController.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "BaseViewController.h"
/// Conversation-list screen (the "WeChat" tab): a table of recent chats.
@interface HomeViewController : BaseViewController
@end
``` | /content/code_sandbox/WeChat/ViewController/Home微信/HomeViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 46 |
```objective-c
//
// ConversationListCell.h
// WeChat
//
// Created by apple on 2020/4/8.
//
#import <UIKit/UIKit.h>
#import "ConversationModel.h"
NS_ASSUME_NONNULL_BEGIN
@interface ConversationListCell : UITableViewCell
/// The conversation to render. Setting it refreshes the avatar, name, message
/// preview and timestamp. (`strong` replaces the pre-ARC `retain` spelling —
/// identical ownership semantics, modern attribute name.)
@property(nonatomic,strong)ConversationModel *conversationModel;
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WeChat/ViewController/Home微信/ConversationListCell.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 71 |
```objective-c
//
// ConversationModel.m
// WeChat
//
// Created by apple on 2020/4/8.
//
#import "ConversationModel.h"
// Plain data holder; all behavior comes from the auto-synthesized accessors.
@implementation ConversationModel
@end
``` | /content/code_sandbox/WeChat/ViewController/Home微信/ConversationModel.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 41 |
```objective-c
//
// ConversationModel.h
// WeChat
//
// Created by apple on 2020/4/8.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Value object backing one row of the conversation list.
/// All string properties use `copy` to defend against mutable-string callers.
@interface ConversationModel : NSObject
// Remote avatar image URL string.
@property(nonatomic,copy)NSString *avatarURL;
// Peer user id.
@property(nonatomic,copy)NSString *userId;
// Display name.
@property(nonatomic,copy)NSString *userName;
// Last-message preview text.
@property(nonatomic,copy)NSString *text;
// Display string for the last-message time.
@property(nonatomic,copy)NSString *time;
// Free-form extra payload — format not defined in this file.
@property(nonatomic,copy)NSString *extra;
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WeChat/ViewController/Home微信/ConversationModel.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 96 |
```objective-c
//
// ConversationViewController.h
// WeChat
//
// Created by zhengwenming on 2020/4/13.
//
#import "BaseViewController.h"
NS_ASSUME_NONNULL_BEGIN
/// Single-chat message screen pushed from the conversation list.
@interface ConversationViewController : BaseViewController
@end
NS_ASSUME_NONNULL_END
``` | /content/code_sandbox/WeChat/ViewController/Home微信/Conversation聊天页面/ConversationViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 57 |
```objective-c
//
// PersonCenterCell.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import <UIKit/UIKit.h>
/// Simple icon + title row used by the "Me" and "Discover" tables (loaded from
/// PersonCenterCell.xib; both outlets are wired in the nib).
@interface PersonCenterCell : UITableViewCell
@property (weak, nonatomic) IBOutlet UIImageView *titleIV;
@property (weak, nonatomic) IBOutlet UILabel *titleLabel;
@end
``` | /content/code_sandbox/WeChat/ViewController/Me我/PersonCenterCell.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 70 |
```objective-c
//
// ConversationViewController.m
// WeChat
//
// Created by zhengwenming on 2020/4/13.
//
#import "ConversationViewController.h"
@interface ConversationViewController()<UITableViewDelegate,UITableViewDataSource>
{
}
@property(nonatomic,strong)UITableView *messageList;
@property(nonatomic,strong)NSMutableArray *dataSource;
@end
@implementation ConversationViewController
/// Lazily-created backing store for the message list.
-(NSMutableArray *)dataSource{
    if (!_dataSource) {
        _dataSource = [[NSMutableArray alloc] init];
    }
    return _dataSource;
}
// Lazy getter for the chat message table; self is delegate and data source.
-(UITableView *)messageList{
    if (_messageList==nil) {
        _messageList = [UITableView new];
        _messageList.delegate = self;
        _messageList.dataSource = self;
        _messageList.rowHeight = 50.f;
        _messageList.backgroundColor = [UIColor groupTableViewBackgroundColor];
        // [_messageList registerNib:[UINib nibWithNibName:NSStringFromClass([AddressBookCell class]) bundle:nil] forCellReuseIdentifier:NSStringFromClass([AddressBookCell class])];
    }
    return _messageList;
}
// Seeds the list with 30 placeholder rows ("0"…"29") and pins the table to the
// controller's view edges with Masonry.
- (void)viewDidLoad {
    [super viewDidLoad];
    for (NSInteger index = 0; index<30; index++) {
        [self.dataSource addObject:[NSString stringWithFormat:@"%ld",(long)index]];
    }
    [self.view addSubview:self.messageList];
    [self.messageList mas_makeConstraints:^(MASConstraintMaker *make) {
        make.edges.mas_equalTo(0);
    }];
    // dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
    //    [self.messageList mas_updateConstraints:^(MASConstraintMaker *make) {
    //        make.edges.mas_equalTo(UIEdgeInsetsMake(0, 0, 80, 0));
    //    }];
    // });
}
// Single flat section containing every placeholder message.
-(NSInteger)numberOfSectionsInTableView:(UITableView *)tableView{
    return 1;
}
-(NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section{
    return self.dataSource.count;
}
// Plain text cell per message. The reuse identifier was hard-coded in two
// places; hoist it into a constant so dequeue and creation cannot drift apart.
-(UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    static NSString * const kMessageCellIdentifier = @"cell";
    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:kMessageCellIdentifier];
    if (!cell) {
        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:kMessageCellIdentifier];
    }
    cell.textLabel.text = self.dataSource[indexPath.row];
    return cell;
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Home微信/Conversation聊天页面/ConversationViewController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 460 |
```objective-c
//
// MeViewController.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "MeViewController.h"
#import "PersonCenterHeaderCell.h"
#import "PersonCenterCell.h"
@interface MeViewController ()<UITableViewDataSource,UITableViewDelegate>{
CGRect oldFrame;
UIImageView *fullScreenIV;
}
@property(nonatomic,strong)UITableView *personCenterTableView;
@property(nonatomic,retain)NSMutableArray *dataArray;
@end
@implementation MeViewController
/// Lazily-built table data: one sub-array per section, one {title, icon} dict
/// per row. Rewritten with modern collection literals (the file already uses
/// @[] / @{} elsewhere) instead of the nil-terminated arrayWithObjects: form.
-(NSMutableArray *)dataArray{
    if (_dataArray==nil) {
        _dataArray = [@[
            @[@{@"title":@"",@"icon":@"MoreExpressionShops"}],
            @[@{@"title":@"",@"icon":@"ff_IconShowAlbum"},
              @{@"title":@"",@"icon":@"MoreMyFavorites"},
              @{@"title":@"",@"icon":@"MoreMyBankCard"},
              @{@"title":@"",@"icon":@"MyCardPackageIcon"}],
            @[@{@"title":@"",@"icon":@"MoreExpressionShops"}],
            @[@{@"title":@"",@"icon":@"MoreSetting"}]
        ] mutableCopy];
    }
    return _dataArray;
}
/// Lazy getter for the grouped "Me" table.
-(UITableView *)personCenterTableView{
    if (_personCenterTableView==nil) {
        // Fix: the original wrote [[UITableView new] initWithFrame:...], which
        // sends a second init… message to an already-initialized object. Use
        // alloc + the designated initializer so the grouped style is applied
        // exactly once. The frame is zero; Masonry pins it in viewDidLoad.
        _personCenterTableView = [[UITableView alloc] initWithFrame:CGRectZero style:UITableViewStyleGrouped];
        _personCenterTableView.delegate = self;
        _personCenterTableView.dataSource = self;
        [_personCenterTableView registerNib:[UINib nibWithNibName:@"PersonCenterHeaderCell" bundle:nil] forCellReuseIdentifier:@"PersonCenterHeaderCell"];
        [_personCenterTableView registerNib:[UINib nibWithNibName:@"PersonCenterCell" bundle:nil] forCellReuseIdentifier:@"PersonCenterCell"];
        _personCenterTableView.backgroundColor = [UIColor groupTableViewBackgroundColor];
    }
    return _personCenterTableView;
}
#pragma mark
#pragma mark viewDidLoad
// Adds the table and pins it to all edges with Masonry.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self.view addSubview:self.personCenterTableView];
    [self.personCenterTableView mas_makeConstraints:^(MASConstraintMaker *make) {
        make.edges.mas_equalTo(UIEdgeInsetsMake(0, 0, 0, 0));
    }];
}
#pragma mark
#pragma mark numberOfSections
// Section 0 is the profile header; sections 1…N map to dataArray[section-1].
-(NSInteger)numberOfSectionsInTableView:(UITableView *)tableView{
    return self.dataArray.count+1;
}
-(NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section{
    if (section) {
        NSArray *rowArray = self.dataArray[section-1];
        return rowArray.count;
    }
    return 1;
}
// Header row is taller (82pt) than the standard icon rows (48pt).
-(CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath{
    return indexPath.section?48:82;
}
// Thin grouped-section spacing; footers collapsed to CGFLOAT_MIN.
-(CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger)section{
    return 10;
}
-(CGFloat)tableView:(UITableView *)tableView heightForFooterInSection:(NSInteger)section{
    return CGFLOAT_MIN;
}
// Dismisses the full-screen avatar: animates it back to the cell's avatar frame
// while fading almost out, then restores alpha and removes the overlay.
-(void)tapForOriginal:(UITapGestureRecognizer *)tap{
    [UIView animateWithDuration:0.3 animations:^{
        fullScreenIV.frame = oldFrame;
        fullScreenIV.alpha = 0.03;
    } completion:^(BOOL finished) {
        fullScreenIV.alpha = 1;
        [fullScreenIV removeFromSuperview];
    }];
}
// Zooms the tapped avatar to full screen on the key window. The avatar's frame
// is converted to window coordinates and remembered so tapForOriginal: can
// animate back. The overlay image view is created once and reused.
-(void)tapForFullScreen:(UITapGestureRecognizer *)tap{
    UIImageView *avatarIV = (UIImageView *)[tap view];
    oldFrame = [avatarIV convertRect:avatarIV.bounds toView:[UIApplication sharedApplication].keyWindow];
    if (fullScreenIV==nil) {
        fullScreenIV= [[UIImageView alloc]initWithFrame:avatarIV.frame];
    }
    fullScreenIV.backgroundColor = [UIColor blackColor];
    fullScreenIV.userInteractionEnabled = YES;
    fullScreenIV.image = avatarIV.image;
    fullScreenIV.contentMode = UIViewContentModeScaleAspectFit;
    [[UIApplication sharedApplication].keyWindow addSubview:fullScreenIV];
    [UIView animateWithDuration:0.3 animations:^{
        fullScreenIV.frame = CGRectMake(0,0,kScreenWidth, kScreenHeight);
    }];
    // NOTE(review): a new tap recognizer is added on every zoom-in; harmless in
    // practice but could be created once alongside fullScreenIV.
    UITapGestureRecognizer *originalTap = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapForOriginal:)];
    [fullScreenIV addGestureRecognizer:originalTap];
}
// Returns the profile header for section 0, otherwise an icon+title row driven
// by dataArray.
//
// Fix: the original hard-coded switch never configured section 1, so its reused
// cell showed stale content, and the icon list was duplicated out-of-sync with
// dataArray. Configuration is now read from dataArray[section-1][row], which
// restores section 1 (MoreExpressionShops) and keeps sections 2–4 identical
// (ff_IconShowAlbum / MoreMyFavorites / MoreMyBankCard / MyCardPackageIcon,
// MoreExpressionShops, MoreSetting — same icons, same empty titles).
-(UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    if (indexPath.section==0) {
        PersonCenterHeaderCell *headerCell = [tableView dequeueReusableCellWithIdentifier:@"PersonCenterHeaderCell"];
        headerCell.selectionStyle = UITableViewCellSelectionStyleNone;
        headerCell.avatarIV.userInteractionEnabled = YES;
        // NOTE(review): adds a recognizer on every dequeue (as the original did);
        // duplicate tap recognizers on the same view are benign but wasteful.
        UITapGestureRecognizer *fullScreenTap = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapForFullScreen:)];
        [headerCell.avatarIV addGestureRecognizer:fullScreenTap];
        headerCell.accessoryType = UITableViewCellAccessoryDisclosureIndicator;
        return headerCell;
    }
    PersonCenterCell *cell = [tableView dequeueReusableCellWithIdentifier:@"PersonCenterCell"];
    NSArray *rows = self.dataArray[indexPath.section-1];
    if (indexPath.row < (NSInteger)rows.count) {
        NSDictionary *rowInfo = rows[indexPath.row];
        cell.titleIV.image = [UIImage imageNamed:rowInfo[@"icon"]];
        cell.titleLabel.text = rowInfo[@"title"];
    }
    return cell;
}
// Hides UIKit's built-in separator so the custom layout controls its own lines.
-(void)tableView:(UITableView *)tableView willDisplayCell:(UITableViewCell *)cell forRowAtIndexPath:(NSIndexPath *)indexPath{
    [self hiddenUITableViewCellSeparatorView:cell];
}
// WARNING(review): fragile hack. It walks the cell's subviews after a 0.4s
// delay looking for the *private* _UITableViewCellSeparatorView class; both the
// delay and the class name can break on any iOS release. Kept byte-identical
// because the timing is deliberate (the separator is added after layout).
-(void)hiddenUITableViewCellSeparatorView:(UITableViewCell *)cell{
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.4 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        for (UIView *aView in cell.subviews) {
            if ([aView isKindOfClass:NSClassFromString(@"_UITableViewCellSeparatorView")]&&aView.frame.origin.x==0) {
                aView.hidden = YES;
            }
        }
    });
}
// Rows only flash on tap; navigation is not implemented yet.
-(void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath{
    [tableView deselectRowAtIndexPath:indexPath animated:YES];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
// NOTE(review): nil-ing an ivar in dealloc is a no-op under ARC; harmless.
-(void)dealloc{
    fullScreenIV = nil;
}
``` | /content/code_sandbox/WeChat/ViewController/Me我/MeViewController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,360 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="15705" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES">
<device id="retina4_7" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="15706"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<tableViewCell contentMode="scaleToFill" selectionStyle="default" accessoryType="disclosureIndicator" indentationWidth="10" rowHeight="37" id="KGk-i7-Jjw" customClass="PersonCenterCell">
<rect key="frame" x="0.0" y="0.0" width="320" height="37"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<tableViewCellContentView key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" tableViewCell="KGk-i7-Jjw" id="H2p-sc-9uM">
<rect key="frame" x="0.0" y="0.0" width="293" height="37"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="ProfileLockOn" translatesAutoresizingMaskIntoConstraints="NO" id="XPz-DL-DpT">
<rect key="frame" x="20" y="11" width="15" height="15"/>
<constraints>
<constraint firstAttribute="width" secondItem="XPz-DL-DpT" secondAttribute="height" multiplier="1:1" id="umk-Pg-jwF"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="XqL-7R-8IR">
<rect key="frame" x="50" y="11" width="244" height="15"/>
<fontDescription key="fontDescription" type="system" pointSize="16"/>
<color key="textColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<constraints>
<constraint firstItem="XPz-DL-DpT" firstAttribute="top" secondItem="H2p-sc-9uM" secondAttribute="top" constant="11" id="4sb-oX-oZA"/>
<constraint firstAttribute="trailing" secondItem="XqL-7R-8IR" secondAttribute="trailing" constant="127" id="5aH-1n-Mx8"/>
<constraint firstItem="XqL-7R-8IR" firstAttribute="centerY" secondItem="H2p-sc-9uM" secondAttribute="centerY" id="8c7-LT-sym"/>
<constraint firstAttribute="bottom" secondItem="XPz-DL-DpT" secondAttribute="bottom" constant="11" id="9dI-ww-W5v"/>
<constraint firstItem="XqL-7R-8IR" firstAttribute="leading" secondItem="XPz-DL-DpT" secondAttribute="trailing" constant="27" id="OOP-Fb-mvv"/>
<constraint firstItem="XqL-7R-8IR" firstAttribute="leading" secondItem="XPz-DL-DpT" secondAttribute="trailing" constant="15" id="WOq-Xn-ONY"/>
<constraint firstItem="XqL-7R-8IR" firstAttribute="centerX" secondItem="H2p-sc-9uM" secondAttribute="centerX" id="Xub-ZC-V5W"/>
<constraint firstItem="XPz-DL-DpT" firstAttribute="centerX" secondItem="H2p-sc-9uM" secondAttribute="centerX" id="n4R-Ps-MvA"/>
<constraint firstItem="XPz-DL-DpT" firstAttribute="leading" secondItem="H2p-sc-9uM" secondAttribute="leading" constant="8" id="oxV-Pu-TJk"/>
<constraint firstItem="XqL-7R-8IR" firstAttribute="bottom" secondItem="H2p-sc-9uM" secondAttribute="bottomMargin" id="ubZ-2l-Sl5"/>
<constraint firstAttribute="trailing" secondItem="XqL-7R-8IR" secondAttribute="trailing" id="wOE-jw-PBS"/>
<constraint firstItem="XPz-DL-DpT" firstAttribute="leading" secondItem="H2p-sc-9uM" secondAttribute="leading" constant="20" id="x0K-WQ-gx7"/>
</constraints>
<variation key="default">
<mask key="constraints">
<exclude reference="n4R-Ps-MvA"/>
<exclude reference="oxV-Pu-TJk"/>
<exclude reference="5aH-1n-Mx8"/>
<exclude reference="OOP-Fb-mvv"/>
<exclude reference="Xub-ZC-V5W"/>
</mask>
</variation>
</tableViewCellContentView>
<connections>
<outlet property="titleIV" destination="XPz-DL-DpT" id="sGL-pE-XAq"/>
<outlet property="titleLabel" destination="XqL-7R-8IR" id="Lzg-KL-suk"/>
</connections>
<point key="canvasLocation" x="250" y="314.5"/>
</tableViewCell>
</objects>
<resources>
<image name="ProfileLockOn" width="17" height="17"/>
</resources>
</document>
``` | /content/code_sandbox/WeChat/ViewController/Me我/PersonCenterCell.xib | xml | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,491 |
```objective-c
//
// PersonCenterCell.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "PersonCenterCell.h"
// Nib-backed cell; outlets are configured entirely in the xib, so only the
// default lifecycle hooks remain.
@implementation PersonCenterCell
- (void)awakeFromNib {
    [super awakeFromNib];
    // Initialization code
}
- (void)setSelected:(BOOL)selected animated:(BOOL)animated {
    [super setSelected:selected animated:animated];
    // Configure the view for the selected state
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Me我/PersonCenterCell.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 97 |
```objective-c
//
// PersonCenterHeaderCell.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "PersonCenterHeaderCell.h"
@implementation PersonCenterHeaderCell
// Rounds the avatar once the nib outlets are connected.
- (void)awakeFromNib {
    [super awakeFromNib];
    self.avatarIV.clipsToBounds= YES;
    self.avatarIV.layer.cornerRadius = 4.f;
    // Initialization code
}
// No custom selected appearance.
- (void)setSelected:(BOOL)selected animated:(BOOL)animated {
    [super setSelected:selected animated:animated];
    // Configure the view for the selected state
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Me我/PersonCenterHeaderCell.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 120 |
```objective-c
//
// MeViewController.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "BaseViewController.h"
/// The "Me" tab: profile header plus grouped settings rows.
@interface MeViewController : BaseViewController
@end
``` | /content/code_sandbox/WeChat/ViewController/Me我/MeViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 46 |
```objective-c
//
// PersonCenterHeaderCell.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import <UIKit/UIKit.h>
/// Profile header row with a tappable avatar (outlet wired in the xib).
@interface PersonCenterHeaderCell : UITableViewCell
@property (weak, nonatomic) IBOutlet UIImageView *avatarIV;
@end
``` | /content/code_sandbox/WeChat/ViewController/Me我/PersonCenterHeaderCell.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 61 |
```objective-c
//
// DiscoverViewController.m
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "DiscoverViewController.h"
#import "WMTimeLineViewController.h"
#import "PersonCenterCell.h"
@interface DiscoverViewController ()<UITableViewDelegate,UITableViewDataSource>{
}
@property(nonatomic,strong)UITableView *discoverTable;
///VC
@property(nonatomic,strong)WMTimeLineViewController *timeLineVC;
@end
@implementation DiscoverViewController
///
/// Lazily-created moments (timeline) controller.
-(WMTimeLineViewController *)timeLineVC{
    if (!_timeLineVC) {
        _timeLineVC = [[WMTimeLineViewController alloc] init];
    }
    return _timeLineVC;
}
// Lazy getter for the grouped Discover table; rows use the shared
// PersonCenterCell nib.
-(UITableView *)discoverTable{
    if (_discoverTable==nil) {
        _discoverTable = [[UITableView alloc]initWithFrame:CGRectMake(0, 0, 0, 0) style:UITableViewStyleGrouped];
        _discoverTable.delegate = self;
        _discoverTable.dataSource = self;
        [_discoverTable registerNib:[UINib nibWithNibName:@"PersonCenterCell" bundle:nil] forCellReuseIdentifier:@"PersonCenterCell"];
    }
    return _discoverTable;
}
-(void)viewWillAppear:(BOOL)animated{
    [super viewWillAppear:animated];
}
// Adds the table and pins it to all edges with Masonry.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self.view addSubview:self.discoverTable];
    [self.discoverTable mas_makeConstraints:^(MASConstraintMaker *make) {
        make.edges.mas_equalTo(0);
    }];
}
// Four fixed sections (moments / scan+shake / nearby / shopping+games).
-(NSInteger)numberOfSectionsInTableView:(UITableView *)tableView{
    return 4;
}
/// Fixed row counts per section — identical to the original switch
/// (1, 2, 1, 2 rows; anything out of range yields 0).
-(NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section{
    static const NSInteger kRowsPerSection[] = {1, 2, 1, 2};
    if (section >= 0 && section < 4) {
        return kRowsPerSection[section];
    }
    return 0;
}
// Uniform 48pt rows, 20pt section gaps, collapsed footers.
-(CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath{
    return 48;
}
-(CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger)section{
    return 20;
}
-(CGFloat)tableView:(UITableView *)tableView heightForFooterInSection:(NSInteger)section{
    return CGFLOAT_MIN;
}
// Static icon/title configuration per (section, row); the title strings are
// empty in this build. NOTE(review): could be table-driven like the data in
// MeViewController, but is kept explicit here.
-(UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    PersonCenterCell *cell = [tableView dequeueReusableCellWithIdentifier:@"PersonCenterCell"];
    if (indexPath.section==0) {//
        cell.titleIV.image = [UIImage imageNamed:@"ff_IconShowAlbum"];
        cell.titleLabel.text = @"";
    }else if (indexPath.section==1){//
        if (indexPath.row==0) {
            cell.titleIV.image = [UIImage imageNamed:@"ff_IconQRCode"];
            cell.titleLabel.text = @"";
        }else if (indexPath.row==1){
            cell.titleIV.image = [UIImage imageNamed:@"ff_IconShake"];
            cell.titleLabel.text = @"";
        }
    }else if(indexPath.section==2){
        cell.titleIV.image = [UIImage imageNamed:@"ff_IconLocationService"];
        cell.titleLabel.text = @"";
    }else if(indexPath.section==3){
        if (indexPath.row==0) {
            cell.titleIV.image = [UIImage imageNamed:@"CreditCard_ShoppingBag"];
            cell.titleLabel.text = @"";
        }else if (indexPath.row==1){
            cell.titleIV.image = [UIImage imageNamed:@"MoreGame"];
            cell.titleLabel.text = @"";
        }
    }
    return cell;
}
// Hides UIKit's built-in separator (same private-class hack as in
// MeViewController — fragile across iOS releases; kept byte-identical).
-(void)tableView:(UITableView *)tableView willDisplayCell:(UITableViewCell *)cell forRowAtIndexPath:(NSIndexPath *)indexPath{
    [self hiddenUITableViewCellSeparatorView:cell];
}
-(void)hiddenUITableViewCellSeparatorView:(UITableViewCell *)cell{
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.4 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        for (UIView *aView in cell.subviews) {
            if ([aView isKindOfClass:NSClassFromString(@"_UITableViewCellSeparatorView")]&&aView.frame.origin.x==0) {
                aView.hidden = YES;
            }
        }
    });
}
// Section 0 (moments) pushes a *fresh* timeline controller each time —
// deliberately bypassing the lazy self.timeLineVC (see commented line), so the
// feed is rebuilt on every visit. Other rows do nothing yet.
-(void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath{
    [tableView deselectRowAtIndexPath:indexPath animated:YES];
    if (indexPath.section==0) {
        // [self.navigationController pushViewController:self.timeLineTwo animated:YES];
        [self.navigationController pushViewController:[[WMTimeLineViewController alloc]init] animated:YES];
    }
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/DiscoverViewController.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 906 |
```objective-c
//
// DiscoverViewController.h
// WeChat
//
// Created by zhengwenming on 16/6/5.
//
#import "BaseViewController.h"
/// The "Discover" tab: entry points for moments, scan, shake, nearby, etc.
@interface DiscoverViewController : BaseViewController
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/DiscoverViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 46 |
```objective-c
//
// LikeUsersCell.m
// WeChat
//
// Created by zhengwenming on 2017/9/23.
//
#import "LikeUsersCell.h"
@implementation LikeUsersCell
// Configures the like-bubble background: the LikeCmtBg image is stretched from
// its center pixel so it scales to any label size, and the label wraps by
// character (suits CJK name lists).
- (void)awakeFromNib {
    [super awakeFromNib];
    UIImage *image = [UIImage imageNamed:@"LikeCmtBg"];
    image = [image stretchableImageWithLeftCapWidth:image.size.width * 0.5 topCapHeight:image.size.height * 0.5];
    self.backgroundView = [[UIImageView alloc]initWithImage:image];
    // self.backgroundColor = [UIColor colorWithRed:236.0/256.0 green:236.0/256.0 blue:236.0/256.0 alpha:1.0];
    self.likeUsersLabel.lineBreakMode = NSLineBreakByCharWrapping;
    self.likeUsersLabel.numberOfLines = 0;
    self.selectionStyle = UITableViewCellSelectionStyleNone;
}
// Renders the pre-built attributed "liked by …" string from the model.
-(void)setModel:(CommentInfoModel *)model{
    _model = model;
    self.likeUsersLabel.attributedText = model.likeUsersAttributedText;
}
#pragma mark
#pragma mark cellleftSpace10
// Overrides the frame UIKit assigns so the bubble is indented past the avatar
// column (2*kGAP + kAvatar_Size — project-level layout constants; assumed to
// match the timeline cell's avatar metrics, TODO confirm) and inset 10pt on
// the right.
-(void)setFrame:(CGRect)frame{
    CGFloat leftSpace = 2*kGAP+kAvatar_Size;
    frame.origin.x = leftSpace;
    frame.size.width = kScreenWidth - leftSpace -10;
    [super setFrame:frame];
}
// No custom selected appearance.
- (void)setSelected:(BOOL)selected animated:(BOOL)animated {
    [super setSelected:selected animated:animated];
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/LikeUsersCell.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 312 |
```objective-c
//
// MessageInfoModel2.h
// WeChat
//
// Created by zhengwenming on 2017/9/21.
//
#import <Foundation/Foundation.h>
#import "CommentInfoModel.h"
#import "FriendInfoModel.h"
#import "Layout.h"
/// Model for one timeline (moments) message, including sender info, photo
/// lists, comments, and cached layout results.
@interface MessageInfoModel : NSObject
@property (nonatomic, copy) NSString *cid;
/// Unique id of this message.
@property(nonatomic,copy)NSString *message_id;
/// Message body text.
@property(nonatomic,copy)NSString *message;
/// Whether the full text is currently expanded in the cell.
@property (nonatomic, assign) BOOL isExpand;
/// Display string for the post time.
@property(nonatomic,copy)NSString *timeTag;
/// Message type discriminator (format not defined in this file).
@property(nonatomic,copy)NSString *message_type;
/// Sender's user id.
@property(nonatomic,copy)NSString *userId;
/// Sender's display name.
@property(nonatomic,copy)NSString *userName;
/// Sender's avatar URL string.
@property(nonatomic,copy)NSString *photo;
/// Fix: these three were declared `copy`, but copying an NSMutableArray yields
/// an *immutable* NSArray stored behind a mutable-typed property — any later
/// addObject:/removeObject: would crash. `strong` preserves mutability.
@property(nonatomic,strong)NSMutableArray *messageSmallPics;
@property(nonatomic,strong)NSMutableArray *messageBigPics;
@property(nonatomic,strong)NSMutableArray *commentModelArray;
/// Cached height of the section header view.
@property (nonatomic, assign) CGFloat headerHeight;
/// Cached text layout.
@property (nonatomic, strong) Layout *textLayout;
/// Cached photo-grid (九宫格) layout.
@property (nonatomic, strong) Layout *jggLayout;
// NOTE(review): property name has a typo ("mutabl") — kept for API compatibility.
@property(nonatomic,strong)NSMutableAttributedString *mutablAttrStr;
/// Designated way to build the model from a JSON dictionary.
-(instancetype)initWithDic:(NSDictionary *)dic;
@end
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/MessageInfoModel.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 247 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="15705" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="15706"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<tableViewCell contentMode="scaleToFill" selectionStyle="default" accessoryType="disclosureIndicator" indentationWidth="10" rowHeight="82" id="KGk-i7-Jjw" customClass="PersonCenterHeaderCell">
<rect key="frame" x="0.0" y="0.0" width="320" height="82"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<tableViewCellContentView key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" tableViewCell="KGk-i7-Jjw" id="H2p-sc-9uM">
<rect key="frame" x="0.0" y="0.0" width="293" height="82"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="avatar" translatesAutoresizingMaskIntoConstraints="NO" id="aZj-Ju-W07">
<rect key="frame" x="15" y="10" width="62" height="62"/>
<constraints>
<constraint firstAttribute="width" secondItem="aZj-Ju-W07" secondAttribute="height" multiplier="1:1" id="aDP-3n-wIq"/>
</constraints>
</imageView>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="qrcode" translatesAutoresizingMaskIntoConstraints="NO" id="2x2-Kd-rmz">
<rect key="frame" x="265.5" y="31" width="17.5" height="20.5"/>
<constraints>
<constraint firstAttribute="width" secondItem="2x2-Kd-rmz" secondAttribute="height" multiplier="17:20" id="GtC-aa-Pe2"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="7M4-hK-NBi">
<rect key="frame" x="87" y="15" width="164" height="25.5"/>
<fontDescription key="fontDescription" type="system" pointSize="21"/>
<color key="textColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=":zhengwenming1988" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="ZzX-dZ-XZR">
<rect key="frame" x="87" y="50" width="196" height="18"/>
<fontDescription key="fontDescription" type="system" pointSize="15"/>
<color key="textColor" white="0.66666666666666663" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<constraints>
<constraint firstItem="2x2-Kd-rmz" firstAttribute="top" secondItem="H2p-sc-9uM" secondAttribute="top" constant="31" id="2tQ-KO-4gs"/>
<constraint firstItem="7M4-hK-NBi" firstAttribute="top" secondItem="H2p-sc-9uM" secondAttribute="top" constant="15" id="B8L-rY-UVQ"/>
<constraint firstItem="7M4-hK-NBi" firstAttribute="leading" secondItem="aZj-Ju-W07" secondAttribute="trailing" constant="10" id="Ee1-1A-ldC"/>
<constraint firstAttribute="bottom" secondItem="ZzX-dZ-XZR" secondAttribute="bottom" constant="14" id="GbL-2h-e0f"/>
<constraint firstAttribute="bottom" secondItem="aZj-Ju-W07" secondAttribute="bottom" constant="10" id="NF6-2s-JTJ"/>
<constraint firstAttribute="bottom" secondItem="2x2-Kd-rmz" secondAttribute="bottom" constant="30.5" id="Pot-ci-8fe"/>
<constraint firstItem="aZj-Ju-W07" firstAttribute="top" secondItem="H2p-sc-9uM" secondAttribute="top" constant="10" id="R10-We-Azp"/>
<constraint firstItem="ZzX-dZ-XZR" firstAttribute="leading" secondItem="aZj-Ju-W07" secondAttribute="trailing" constant="10" id="WHR-Mb-r1c"/>
<constraint firstAttribute="trailing" secondItem="7M4-hK-NBi" secondAttribute="trailing" constant="42" id="efb-kk-Rqq"/>
<constraint firstAttribute="trailing" secondItem="ZzX-dZ-XZR" secondAttribute="trailing" constant="10" id="iZL-si-Ph8"/>
<constraint firstItem="aZj-Ju-W07" firstAttribute="leading" secondItem="H2p-sc-9uM" secondAttribute="leading" constant="15" id="lHO-T7-cRr"/>
<constraint firstAttribute="trailing" secondItem="2x2-Kd-rmz" secondAttribute="trailing" constant="10" id="zzA-m5-Uky"/>
</constraints>
</tableViewCellContentView>
<connections>
<outlet property="avatarIV" destination="aZj-Ju-W07" id="ioR-hw-xH8"/>
</connections>
<point key="canvasLocation" x="250" y="337"/>
</tableViewCell>
</objects>
<resources>
<image name="avatar" width="720" height="720"/>
<image name="qrcode" width="17" height="17"/>
</resources>
</document>
``` | /content/code_sandbox/WeChat/ViewController/Me我/PersonCenterHeaderCell.xib | xml | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,663 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="15705" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES">
<device id="retina4_7" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="15706"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<tableViewCell contentMode="scaleToFill" selectionStyle="default" indentationWidth="10" id="KGk-i7-Jjw" userLabel="Like Users Cell" customClass="LikeUsersCell">
<rect key="frame" x="0.0" y="0.0" width="320" height="44"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<tableViewCellContentView key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" tableViewCell="KGk-i7-Jjw" id="H2p-sc-9uM">
<rect key="frame" x="0.0" y="0.0" width="320" height="44"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Label" textAlignment="natural" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="X59-fB-DSh">
<rect key="frame" x="0.0" y="8" width="320" height="36"/>
<fontDescription key="fontDescription" type="system" pointSize="13"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<constraints>
<constraint firstItem="X59-fB-DSh" firstAttribute="top" secondItem="H2p-sc-9uM" secondAttribute="top" constant="8" id="28W-On-Mic"/>
<constraint firstAttribute="bottom" secondItem="X59-fB-DSh" secondAttribute="bottom" id="EdK-cq-Zhh"/>
<constraint firstAttribute="trailing" secondItem="X59-fB-DSh" secondAttribute="trailing" id="MPE-wa-vbL"/>
<constraint firstItem="X59-fB-DSh" firstAttribute="leading" secondItem="H2p-sc-9uM" secondAttribute="leading" id="pz3-cP-yb4"/>
</constraints>
</tableViewCellContentView>
<connections>
<outlet property="likeUsersLabel" destination="X59-fB-DSh" id="hhm-LI-wJL"/>
</connections>
<point key="canvasLocation" x="-19" y="35"/>
</tableViewCell>
</objects>
</document>
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/LikeUsersCell.xib | xml | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 775 |
```objective-c
//
// WMTimeLineHeaderView.m
// WeChat
//
// Created by zhengwenming on 2017/9/18.
//
#import "WMTimeLineHeaderView.h"
/// Class extension: private subviews and state for the timeline section header.
@interface WMTimeLineHeaderView (){
    CGFloat commentBtnWidth;   // fixed width of the comment button
    CGFloat commentBtnHeight;  // fixed height of the comment button
    CGFloat MaxLabelHeight;    // collapse threshold for the body-text label
}
/// Hairline separator drawn at the top of the header.
@property(nonatomic,retain)UILabel *sepLine;
/// Poster's avatar image view.
@property(nonatomic,retain)UIImageView *avatarIV;
/// Poster's display name.
@property(nonatomic,retain)UILabel *userNameLabel;
/// Post timestamp label.
@property(nonatomic,retain)UILabel *timeStampLabel;
/// Multi-line, copyable body text of the post.
@property(nonatomic,strong)CopyAbleLabel *messageTextLabel;
/// Button that opens the comment input.
@property(nonatomic,retain)UIButton *commentBtn;
/// Button that expands/collapses the full body text.
@property(nonatomic,retain)UIButton *moreBtn;
/// Current expand state of the body text.
@property(nonatomic,assign)BOOL isExpandNow;
/// Section index this header currently represents.
@property(nonatomic,assign)NSInteger headerSection;
/// Photo-grid (3x3 collage) view for the post's images.
@property(nonatomic,strong)JGGView *jggView;
/**
 * Fired when the comment button is tapped; passes the button and the section index.
 */
@property (nonatomic, copy)void(^CommentBtnClickBlock)(UIButton *commentBtn,NSInteger headerSection);
/**
 * Fired when the more/expand button is tapped; passes the current expand state.
 */
@property (nonatomic, copy)void(^MoreBtnClickBlock)(UIButton *moreBtn,BOOL isExpand);
@end
@implementation WMTimeLineHeaderView
/**
 * Designated initializer for this reusable section header.
 *
 * Builds the complete subview hierarchy once (separator, avatar, name,
 * timestamp, body text, comment/more buttons, photo grid); setModel:
 * later only repositions and fills these views using pre-computed layout.
 *
 * @param reuseIdentifier the reuse identifier registered with the table view
 * @return the configured header view
 */
- (instancetype)initWithReuseIdentifier:(nullable NSString *)reuseIdentifier{
    if (self = [super initWithReuseIdentifier:reuseIdentifier]) {
        self.contentView.backgroundColor = [UIColor whiteColor];
        // Hairline separator: 1 physical pixel regardless of screen scale.
        self.sepLine = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, kScreenWidth, 1.0/(UIScreen.mainScreen.scale))];
        // Borrow the system table-view separator color via a throwaway instance.
        self.sepLine.backgroundColor = [[UITableView new] separatorColor];
        // NOTE(review): subviews are added to self rather than contentView;
        // UIKit recommends contentView for reusable header/footer views —
        // confirm this is intentional.
        [self addSubview:self.sepLine];
        self.avatarIV = [[UIImageView alloc]initWithFrame:CGRectMake(kGAP, kGAP, kAvatar_Size, kAvatar_Size)];
        [self addSubview:self.avatarIV];
        self.avatarIV.layer.cornerRadius =3;
        self.avatarIV.clipsToBounds = YES;
        // Name label sits to the right of the avatar, top-aligned with it.
        self.userNameLabel = [[UILabel alloc]initWithFrame:CGRectMake(CGRectGetMaxX(self.avatarIV.frame)+kGAP, kGAP,kScreenWidth-kAvatar_Size-2*kGAP-kGAP, self.avatarIV.frame.size.height/2)];
        self.userNameLabel.font = [UIFont systemFontOfSize:16.0];
        self.userNameLabel.textColor = [UIColor colorWithRed:(54/255.0) green:(71/255.0) blue:(121/255.0) alpha:0.9];
        [self addSubview:self.userNameLabel];
        // Timestamp is created with a zero frame; presumably positioned later
        // during layout — TODO confirm where its frame is set.
        self.timeStampLabel = [[UILabel alloc]init];
        self.timeStampLabel.font = [UIFont systemFontOfSize:12.0];
        self.timeStampLabel.textColor = [UIColor lightGrayColor];
        [self addSubview:self.timeStampLabel];
        // Body text: multi-line, copyable; frame applied in setModel: from the
        // model's cached layout.
        self.messageTextLabel = [[CopyAbleLabel alloc]init];
        self.messageTextLabel.numberOfLines = 0;
        self.messageTextLabel.lineBreakMode = NSLineBreakByCharWrapping;
        self.messageTextLabel.font = [UIFont systemFontOfSize:14.0];
        [self addSubview:self.messageTextLabel];
        commentBtnWidth = 60;
        commentBtnHeight = 22;
        MaxLabelHeight = 75.0;
        // Comment button: bordered, icon-only (titles are empty strings).
        self.commentBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        self.commentBtn.backgroundColor = [UIColor whiteColor];
        [self.commentBtn setTitle:@"" forState:UIControlStateNormal];
        [self.commentBtn setTitle:@"" forState:UIControlStateSelected];
        [self.commentBtn setTitleColor:[UIColor lightGrayColor] forState:UIControlStateNormal];
        self.commentBtn.layer.borderColor = [UIColor lightGrayColor].CGColor;
        self.commentBtn.layer.borderWidth = 1;
        self.commentBtn.titleLabel.font = [UIFont systemFontOfSize:12.0];
        [self.commentBtn setImage:[UIImage imageNamed:@"commentBtn"] forState:UIControlStateNormal];
        [self.commentBtn setImage:[UIImage imageNamed:@"commentBtn"] forState:UIControlStateSelected];
        [self.commentBtn addTarget:self action:@selector(commentAction:) forControlEvents:UIControlEventTouchUpInside];
        [self addSubview:self.commentBtn];
        // More/expand button: text-only, left-aligned.
        self.moreBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        [self.moreBtn setTitle:@"" forState:UIControlStateNormal];
        [self.moreBtn setTitle:@"" forState:UIControlStateSelected];
        [self.moreBtn setTitleColor:[UIColor lightGrayColor] forState:UIControlStateNormal];
        self.moreBtn.titleLabel.font = [UIFont systemFontOfSize:12.0];
        self.moreBtn.contentHorizontalAlignment = UIControlContentHorizontalAlignmentLeft;
        [self.moreBtn addTarget:self action:@selector(moreAction:) forControlEvents:UIControlEventTouchUpInside];
        [self addSubview:self.moreBtn];
        self.isExpandNow = NO;
        self.jggView = [[JGGView alloc] init];
        [self addSubview:self.jggView];
    }
    return self;
}

/// Forwards a comment-button tap to the owner via CommentBtnClickBlock.
-(void)commentAction:(UIButton *)sender{
    if (self.CommentBtnClickBlock) {
        self.CommentBtnClickBlock(sender,self.headerSection);
    }
}

/// Forwards a more/expand tap (and the current expand state) via MoreBtnClickBlock.
-(void)moreAction:(UIButton *)sender{
    if (self.MoreBtnClickBlock) {
        self.MoreBtnClickBlock(sender,_isExpandNow);
    }
}

/// Fills the header with one post's data. Frames come from the model's
/// pre-computed layouts (textLayout / jggLayout), so no measuring happens here.
/// NOTE(review): if a `model` property is declared in the public header, it is
/// never assigned in this setter — reading it back would return nil; confirm.
-(void)setModel:(MessageInfoModel *)model{
    __weak __typeof(self) weakSelf= self;
    // Re-route image taps from the photo grid to this header's tapImageBlock;
    // weak capture avoids a retain cycle (self -> jggView -> tapBlock -> self).
    self.jggView.tapBlock = ^(NSInteger index, NSArray *dataSource) {
        if (weakSelf.tapImageBlock) {
            weakSelf.tapImageBlock(index, dataSource);
        }
    };
    [self.avatarIV sd_setImageWithURL:[NSURL URLWithString:model.photo] placeholderImage:[UIImage imageNamed:@"placeholder"]];
    self.userNameLabel.text = model.userName;
    self.messageTextLabel.attributedText = model.mutablAttrStr;
    self.messageTextLabel.frame = model.textLayout.frameLayout;
    // Clear recycled grid content before binding the new data source.
    [self.jggView.subviews makeObjectsPerformSelector:@selector(removeFromSuperview)];
    self.jggView.dataSource = model.messageSmallPics;
    self.jggView.frame = model.jggLayout.frameLayout;
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/WMTimeLineHeaderView.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,166 |
```objective-c
//
// WMTimeLineViewController.h
// WeChat
//
// Created by zhengwenming on 2017/9/21.
//
#import "TimeLineBaseViewController.h"
/// Timeline ("moments") list screen. Inherits all list behavior from
/// TimeLineBaseViewController and currently adds nothing of its own.
@interface WMTimeLineViewController : TimeLineBaseViewController
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/WMTimeLineViewController.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 55 |
```objective-c
//
// CommentCell.m
// WeChat
//
// Created by zhengwenming on 2017/9/21.
//
#import "CommentCell.h"
#import "JGGView.h"
/// Class extension: private UI kept out of the public header.
@interface CommentCell ()
/// Multi-line, copyable label that renders the comment's attributed text.
@property (nonatomic, strong) CopyAbleLabel *contentLabel;
@end
@implementation CommentCell

/// Builds the comment row: a translucent grey fill and one multi-line,
/// copyable label pinned to every edge of the content view. A tap gesture
/// on the label drives the reply callback.
- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier {
    self = [super initWithStyle:style reuseIdentifier:reuseIdentifier];
    if (self) {
        self.selectionStyle = UITableViewCellSelectionStyleNone;

        UIColor *fillColor = [UIColor colorWithRed:236.0/256.0 green:236.0/256.0 blue:236.0/256.0 alpha:0.4];
        self.backgroundColor = fillColor;
        self.contentView.backgroundColor = fillColor;

        // The label wraps by character and has no line limit; its preferred
        // width matches the indented text column of the timeline.
        CopyAbleLabel *label = [[CopyAbleLabel alloc] init];
        label.backgroundColor = fillColor;
        label.preferredMaxLayoutWidth = kScreenWidth - kGAP - kAvatar_Size - 2*kGAP;
        label.lineBreakMode = NSLineBreakByCharWrapping;
        label.numberOfLines = 0;
        label.font = [UIFont systemFontOfSize:13.0];
        self.contentLabel = label;
        [self.contentView addSubview:label];
        [label mas_makeConstraints:^(MASConstraintMaker *make) {
            make.edges.equalTo(self.contentView).with.insets(UIEdgeInsetsZero);
        }];

        UITapGestureRecognizer *tapRecognizer =
            [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapComment:)];
        [label addGestureRecognizer:tapRecognizer];
    }
    return self;
}

/// Forwards a tap on the comment text to the owner via tapCommentBlock.
- (void)tapComment:(UITapGestureRecognizer *)recognizer {
    if (self.tapCommentBlock) {
        self.tapCommentBlock(self, self.model);
    }
}

#pragma mark - Frame override
/// Indents the whole cell so comment rows line up with the text column
/// (avatar width plus gaps on the left, a 10-point margin on the right).
- (void)setFrame:(CGRect)frame {
    CGFloat indent = 2*kGAP + kAvatar_Size;
    CGRect adjusted = frame;
    adjusted.origin.x = indent;
    adjusted.size.width = kScreenWidth - indent - 10;
    [super setFrame:adjusted];
}

/// Stores the model and renders its attributed text; any non-comment (or nil)
/// value clears the label.
- (void)setModel:(CommentInfoModel *)model {
    _model = model;
    if (![model isKindOfClass:[CommentInfoModel class]]) {
        self.contentLabel.text = @"";
    } else {
        self.contentLabel.attributedText = model.attributedText;
    }
}

@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/CommentCell.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 496 |
```objective-c
//
// LikeUsersCell.h
// WeChat
//
// Created by zhengwenming on 2017/9/23.
//
#import <UIKit/UIKit.h>
@class FriendInfoModel;
#import "MessageInfoModel.h"
/// Invoked when the user taps a liker's name in the "liked by" row.
typedef void(^TapNameBlock)(FriendInfoModel *friendModel);

/// Table cell that shows the aggregated "liked by user1, user2, …" row
/// beneath a timeline post (loaded from LikeUsersCell.xib).
@interface LikeUsersCell : UITableViewCell
/// Multi-line label holding the attributed "liked by" string.
@property (weak, nonatomic) IBOutlet UILabel *likeUsersLabel;
// Models of the users who liked the post.
// NOTE(review): appears unused in the .m — the cell renders
// model.likeUsersAttributedText instead; confirm against callers.
@property(nonatomic,strong)NSMutableArray *likeUsersArray;
/// Comment model carrying the pre-built likeUsersAttributedText.
@property(nonatomic,strong)CommentInfoModel *model;
/// Callback fired when a liker's name is tapped.
@property(nonatomic ,copy)TapNameBlock tapNameBlock;
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/LikeUsersCell.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 125 |
```objective-c
//
// CommentCell.h
// WeChat
//
// Created by zhengwenming on 2017/9/21.
//
#import <UIKit/UIKit.h>
#import "CommentInfoModel.h"
@class CommentCell;
/// Invoked when the user taps the comment text of a cell.
typedef void(^TapCommentBlock)(CommentCell *cell,CommentInfoModel *model);

/// Table cell that renders one comment beneath a timeline post.
@interface CommentCell : UITableViewCell
/// Comment to display; setting it updates the rendered attributed text.
@property(nonatomic,strong)CommentInfoModel *model;
/// Callback fired when the comment text is tapped (used to start a reply).
@property(nonatomic ,copy)TapCommentBlock tapCommentBlock;
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/CommentCell.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 100 |
```objective-c
//
// CommentInfoModel.h
// WeChat
//
// Created by zhengwenming on 2017/9/21.
//
#import <Foundation/Foundation.h>
@class FriendInfoModel;

/// Model for a single comment (or the aggregated "liked by" row) on a
/// timeline message, plus the cached row height used by the table view.
@interface CommentInfoModel : NSObject

/// Whether the comment text is currently expanded.
@property (nonatomic, assign) BOOL isExpand;
@property (nonatomic, copy) NSString *commentId;
@property (nonatomic, copy) NSString *commentUserId;
@property (nonatomic, copy) NSString *commentUserName;
@property (nonatomic, copy) NSString *commentPhoto;
/// Raw comment text.
@property (nonatomic, copy) NSString *commentText;
/// Pre-built attributed rendering of the comment.
@property (nonatomic, copy) NSAttributedString *attributedText;
/// Identity of the user being replied to, if any.
@property (nonatomic, copy) NSString *commentByUserId;
@property (nonatomic, copy) NSString *commentByUserName;
@property (nonatomic, copy) NSString *commentByPhoto;
@property (nonatomic, copy) NSString *createDateStr;
@property (nonatomic, copy) NSString *checkStatus;
/// Full-size image URLs referenced by the comment.
/// NOTE: `strong`, not `copy` — copying an NSMutableArray stores an immutable
/// NSArray and would crash on any later mutation via the property.
@property (nonatomic, strong) NSMutableArray *messageBigPicArray;
/// Pre-built "liked by …" attributed string (mutable; `copy` would freeze it
/// into an immutable NSAttributedString).
@property (nonatomic, strong) NSMutableAttributedString *likeUsersAttributedText;
/// Users who liked the message. Typed FriendInfoModel to match what
/// MessageInfoModel actually stores here (was mis-declared CommentInfoModel).
@property (nonatomic, strong) NSMutableArray<FriendInfoModel *> *likeUsersArray;
/// Nested replies to this comment.
@property (nonatomic, strong) NSMutableArray *commentModelArray;
/// Cached table-view row height for this comment.
@property (nonatomic, assign) CGFloat rowHeight;

/// Builds a model from a server/dictionary payload.
- (instancetype)initWithDic:(NSDictionary *)dic;

@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/CommentInfoModel.h | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 223 |
```objective-c
//
// MessageInfoModel2.m
// WeChat
//
// Created by zhengwenming on 2017/9/21.
//
#import "MessageInfoModel.h"
@implementation MessageInfoModel

// Lazy accessor: comments attached to this message (created on first use).
-(NSMutableArray *)commentModelArray{
    if (_commentModelArray==nil) {
        _commentModelArray = [NSMutableArray array];
    }
    return _commentModelArray;
}

// Lazy accessor: thumbnail image URLs.
-(NSMutableArray *)messageSmallPics{
    if (_messageSmallPics==nil) {
        _messageSmallPics = [NSMutableArray array];
    }
    return _messageSmallPics;
}

// Lazy accessor: full-size image URLs.
-(NSMutableArray *)messageBigPics{
    if (_messageBigPics==nil) {
        _messageBigPics = [NSMutableArray array];
    }
    return _messageBigPics;
}

// Lazy accessor: cached layout for the body-text label.
-(Layout *)textLayout{
    if (_textLayout==nil) {
        _textLayout = [Layout new];
    }
    return _textLayout;
}

// Lazy accessor: cached layout for the photo grid.
-(Layout *)jggLayout{
    if (_jggLayout==nil) {
        _jggLayout = [Layout new];
    }
    return _jggLayout;
}

/// Builds the model from a dictionary payload and pre-computes everything the
/// header view needs at display time: the "liked by" attributed string (with
/// per-name tappable ranges), the body-text attributed string, and the frames
/// of the text label and photo grid (plus the total header height).
-(instancetype)initWithDic:(NSDictionary *)dic{
    self = [super init];
    if (self) {
        self.cid = dic[@"cid"];
        self.message_id = dic[@"message_id"];
        self.message = dic[@"message"];
        self.timeTag = dic[@"timeTag"];
        self.message_type = dic[@"message_type"];
        self.userId = dic[@"userId"];
        self.userName = dic[@"userName"];
        self.photo = dic[@"photo"];
        // NOTE(review): these two properties are declared `copy` on an
        // NSMutableArray type in the header, so the setters store immutable
        // copies; works today because only .count is read, but any future
        // mutation through the property would crash — confirm/fix the header.
        self.messageSmallPics = dic[@"messageSmallPics"];
        self.messageBigPics = dic[@"messageBigPics"];
        // Build FriendInfoModel objects for each user who liked the post.
        NSMutableArray <FriendInfoModel *>*likeUsers = [NSMutableArray array];
        for (NSDictionary *friendInfoDic in dic[@"likeUsers"]) {
            [likeUsers addObject:[[FriendInfoModel alloc]initWithDic:friendInfoDic]];
        }
        if (likeUsers.count) {
            CommentInfoModel *infoModel = [CommentInfoModel new];
            infoModel.likeUsersArray = likeUsers.mutableCopy;
            // Compose the "liked by name1,name2,…" attributed string, tracking
            // the character range of each name so taps can be resolved later.
            NSMutableArray *rangesArray = [NSMutableArray array];
            NSMutableArray *nameArray = [NSMutableArray array];
            NSMutableAttributedString *mutablAttrStr = [[NSMutableAttributedString alloc]init];
            NSTextAttachment *attch = [[NSTextAttachment alloc] init];
            // Leading heart/like icon, nudged down 5pt to align with the text baseline.
            attch.image = [UIImage imageNamed:@"Like"];
            attch.bounds = CGRectMake(0, -5, attch.image.size.width, attch.image.size.height);
            [mutablAttrStr insertAttributedString:[NSAttributedString attributedStringWithAttachment:attch] atIndex:0];
            for (int i = 0; i <likeUsers.count; i++) {
                FriendInfoModel *friendModel = likeUsers[i];
                [mutablAttrStr appendAttributedString:[[NSAttributedString alloc] initWithString:friendModel.userName]];
                if ([nameArray containsObject:friendModel.userName]) {
                    // Duplicate name: rangeOfString: would find the FIRST
                    // occurrence, so compute the just-appended range directly.
                    friendModel.range = NSMakeRange(mutablAttrStr.length-friendModel.userName.length, friendModel.userName.length);
                }else{
                    friendModel.range = [mutablAttrStr.string rangeOfString:friendModel.userName];
                }
                // Comma-separate all but the last name.
                if (i != likeUsers.count - 1) {
                    [mutablAttrStr appendAttributedString:[[NSAttributedString alloc] initWithString:@","]];
                }
                [rangesArray addObject:[NSValue valueWithRange:friendModel.range]];
                [nameArray addObject:friendModel.userName];
            }
            UIFont *font = [UIFont systemFontOfSize:13.f];
            [mutablAttrStr addAttributes:@{NSFontAttributeName : font} range:NSMakeRange(0, mutablAttrStr.length)];
            NSMutableParagraphStyle *style = [[NSMutableParagraphStyle alloc] init];
            style.lineSpacing = 3.0;
            [mutablAttrStr addAttribute:NSParagraphStyleAttributeName value:style range:NSMakeRange(0, mutablAttrStr.length)];
            // Highlight each name so it reads as a tappable link.
            for (NSValue *aRangeValue in rangesArray) {
                [mutablAttrStr addAttributes:@{NSForegroundColorAttributeName : [UIColor orangeColor]} range:aRangeValue.rangeValue];
            }
            infoModel.likeUsersAttributedText = mutablAttrStr;
            // Cache the LikeUsersCell row height (text height + padding fudge).
            infoModel.rowHeight = [mutablAttrStr.string boundingRectWithSize:CGSizeMake(kScreenWidth-kGAP-kAvatar_Size-2*kGAP, CGFLOAT_MAX) font:font lineSpacing:3.0].height+0.5+8+5;
            // NOTE(review): infoModel is built but the line appending it to
            // commentModelArray is commented out — presumably the like row is
            // inserted elsewhere; confirm this is intentional.
            // [self.commentModelArray addObject:infoModel];
        }
        // Parse the plain comment rows.
        for (NSDictionary *eachDic in dic[@"commentMessages"] ) {
            [self.commentModelArray addObject:[[CommentInfoModel alloc] initWithDic:eachDic]];
        }
        NSMutableParagraphStyle *muStyle = [[NSMutableParagraphStyle alloc]init];
        UIFont *font = [UIFont systemFontOfSize:14.0];
        muStyle.alignment = NSTextAlignmentLeft;
        NSMutableAttributedString *attrString = [[NSMutableAttributedString alloc] initWithString:self.message];
        [attrString addAttribute:NSFontAttributeName value:font range:NSMakeRange(0, attrString.length)];
        [attrString addAttribute:NSParagraphStyleAttributeName value:muStyle range:NSMakeRange(0, attrString.length)];
        // Only apply extra line spacing when the text actually wraps; muStyle
        // is mutated AFTER being added above — this relies on the attributed
        // string holding a live reference to the same paragraph-style object.
        if ([attrString.string isMoreThanOneLineWithSize:CGSizeMake(kScreenWidth-kGAP-kAvatar_Size-2*kGAP, CGFLOAT_MAX) font:font lineSpaceing:3.0]) {
            muStyle.lineSpacing = 3.0;
        }else{
            muStyle.lineSpacing = CGFLOAT_MIN;
        }
        self.mutablAttrStr = attrString;
        // Cache the body-text frame: indented past the avatar column.
        CGFloat textHeight = [attrString.string boundingRectWithSize:CGSizeMake(kScreenWidth-kGAP-kAvatar_Size-2*kGAP, CGFLOAT_MAX) font:font lineSpacing:3.0].height+0.5;
        self.textLayout.frameLayout = CGRectMake(kGAP+kAvatar_Size+kGAP, kGAP+kAvatar_Size/2+5, kScreenWidth-2*kGAP-kAvatar_Size-kGAP, textHeight);
        // Cache the photo-grid frame: 3 columns; height depends on how many
        // rows the image count fills (0, 1, 2, or 3 rows of square cells).
        CGFloat jgg_Width = kScreenWidth-2*kGAP-kAvatar_Size-50;
        CGFloat image_Width = (jgg_Width-2*kGAP)/3;
        CGFloat jgg_height = 0;
        if (self.messageSmallPics.count==0) {
            jgg_height = 0;
        }else if (self.messageSmallPics.count<=3) {
            jgg_height = image_Width;
        }else if (self.messageSmallPics.count>3&&self.messageSmallPics.count<=6){
            jgg_height = 2*image_Width+kGAP;
        }else if (self.messageSmallPics.count>6&&self.messageSmallPics.count<=9){
            jgg_height = 3*image_Width+2*kGAP;
        }
        self.jggLayout.frameLayout = CGRectMake(self.textLayout.frameLayout.origin.x, CGRectGetMaxY(self.textLayout.frameLayout)+kGAP, jgg_Width, jgg_height);
        // Total header height: bottom of the grid plus a gap when images exist.
        self.headerHeight = CGRectGetMaxY(self.jggLayout.frameLayout)+((self.messageSmallPics.count==0)?0.f:kGAP);
    }
    return self;
}
@end
``` | /content/code_sandbox/WeChat/ViewController/Discover发现-朋友圈/朋友圈-单个tableView/MessageInfoModel.m | objective-c | 2016-06-06T01:53:42 | 2024-08-05T09:45:48 | WeChat | zhengwenming/WeChat | 1,626 | 1,558 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.