text stringlengths 9 39.2M | dir stringlengths 25 226 | lang stringclasses 163 values | created_date timestamp[s] | updated_date timestamp[s] | repo_name stringclasses 751 values | repo_full_name stringclasses 752 values | star int64 1.01k 183k | len_tokens int64 1 18.5M |
|---|---|---|---|---|---|---|---|---|
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
// Override vertex shader to remove dependent texture reads.
// All nine texture coordinates of the 3x3 sampling neighborhood are computed
// here, in the vertex shader, and passed down as varyings. The fragment shader
// can then sample with those varyings directly instead of deriving coordinates
// itself, which avoids dependent texture reads.
// Inputs: texelWidth/texelHeight are the per-texel step sizes in normalized
// texture coordinates (set up by GPUImage3x3TextureSamplingFilter).
NSString *const kGPUImageNearbyTexelSamplingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 uniform float texelWidth;
 uniform float texelHeight;

 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;

 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;

 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;

 void main()
 {
     gl_Position = position;

     vec2 widthStep = vec2(texelWidth, 0.0);
     vec2 heightStep = vec2(0.0, texelHeight);
     vec2 widthHeightStep = vec2(texelWidth, texelHeight);
     vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);

     textureCoordinate = inputTextureCoordinate.xy;
     leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
     rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;

     topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
     topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;
     topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;

     bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;
     bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;
     bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;
 }
);
@implementation GPUImage3x3TextureSamplingFilter

@synthesize texelWidth = _texelWidth;
@synthesize texelHeight = _texelHeight;

#pragma mark -
#pragma mark Initialization and teardown

// Designated initializer: compiles the supplied shader pair and caches the
// locations of the texel-size uniforms used by the 3x3 sampling vertex shader.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
    texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];

    return self;
}

// Convenience initializer: pairs the caller's fragment shader with the shared
// neighborhood-sampling vertex shader defined above.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    return [self initWithVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString];
}

// Derives the texel step sizes from the frame size, unless the caller has
// explicitly overridden them via the texelWidth/texelHeight setters.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    if (hasOverriddenImageSizeFactor)
    {
        return;
    }

    _texelWidth = 1.0 / filterFrameSize.width;
    _texelHeight = 1.0 / filterFrameSize.height;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:filterProgram];

        // A 90/270-degree input rotation swaps the axes, so the width and
        // height texel sizes must be exchanged before uploading.
        BOOL axesAreSwapped = GPUImageRotationSwapsWidthAndHeight(inputRotation);
        glUniform1f(texelWidthUniform, axesAreSwapped ? _texelHeight : _texelWidth);
        glUniform1f(texelHeightUniform, axesAreSwapped ? _texelWidth : _texelHeight);
    });
}

#pragma mark -
#pragma mark Accessors

// Explicitly setting a texel size pins it: setupFilterForSize: will no longer
// recompute either dimension from the frame size.
- (void)setTexelWidth:(CGFloat)texelWidth;
{
    hasOverriddenImageSizeFactor = YES;
    _texelWidth = texelWidth;

    [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];
}

- (void)setTexelHeight:(CGFloat)texelHeight;
{
    hasOverriddenImageSizeFactor = YES;
    _texelHeight = texelHeight;

    [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 824 |
```objective-c
#import "GPUImageLanczosResamplingFilter.h"
// Vertex shader shared by both Lanczos passes. It precomputes the center tap
// plus four taps on each side along a single axis; which axis is sampled is
// controlled entirely by the texelWidthOffset/texelHeightOffset uniforms (one
// of the two is expected to be zero per pass). Computing the coordinates here
// avoids dependent texture reads in the fragment shader.
NSString *const kGPUImageLanczosVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec2 inputTextureCoordinate;

 uniform float texelWidthOffset;
 uniform float texelHeightOffset;

 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepLeftTextureCoordinate;
 varying vec2 twoStepsLeftTextureCoordinate;
 varying vec2 threeStepsLeftTextureCoordinate;
 varying vec2 fourStepsLeftTextureCoordinate;
 varying vec2 oneStepRightTextureCoordinate;
 varying vec2 twoStepsRightTextureCoordinate;
 varying vec2 threeStepsRightTextureCoordinate;
 varying vec2 fourStepsRightTextureCoordinate;

 void main()
 {
     gl_Position = position;

     vec2 firstOffset = vec2(texelWidthOffset, texelHeightOffset);
     vec2 secondOffset = vec2(2.0 * texelWidthOffset, 2.0 * texelHeightOffset);
     vec2 thirdOffset = vec2(3.0 * texelWidthOffset, 3.0 * texelHeightOffset);
     vec2 fourthOffset = vec2(4.0 * texelWidthOffset, 4.0 * texelHeightOffset);

     centerTextureCoordinate = inputTextureCoordinate;
     oneStepLeftTextureCoordinate = inputTextureCoordinate - firstOffset;
     twoStepsLeftTextureCoordinate = inputTextureCoordinate - secondOffset;
     threeStepsLeftTextureCoordinate = inputTextureCoordinate - thirdOffset;
     fourStepsLeftTextureCoordinate = inputTextureCoordinate - fourthOffset;
     oneStepRightTextureCoordinate = inputTextureCoordinate + firstOffset;
     twoStepsRightTextureCoordinate = inputTextureCoordinate + secondOffset;
     threeStepsRightTextureCoordinate = inputTextureCoordinate + thirdOffset;
     fourStepsRightTextureCoordinate = inputTextureCoordinate + fourthOffset;
 }
);
// 9-tap Lanczos fragment shader: a weighted sum over the precomputed taps.
// Two variants are compiled: the iOS/GL ES version (which requires precision
// qualifiers) and the desktop GL version (which has none). The kernel weights
// are identical in both.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageLanczosFragmentShaderString = SHADER_STRING
(
 precision highp float;

 uniform sampler2D inputImageTexture;

 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepLeftTextureCoordinate;
 varying vec2 twoStepsLeftTextureCoordinate;
 varying vec2 threeStepsLeftTextureCoordinate;
 varying vec2 fourStepsLeftTextureCoordinate;
 varying vec2 oneStepRightTextureCoordinate;
 varying vec2 twoStepsRightTextureCoordinate;
 varying vec2 threeStepsRightTextureCoordinate;
 varying vec2 fourStepsRightTextureCoordinate;

 // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2)
 // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5
 void main()
 {
     lowp vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026;

     fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667;
     fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667;

     fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074;
     fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074;

     fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612;
     fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612;

     fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143;
     fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143;

     gl_FragColor = fragmentColor;
 }
);
#else
// Desktop OpenGL variant: same kernel, no precision qualifiers.
NSString *const kGPUImageLanczosFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;

 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepLeftTextureCoordinate;
 varying vec2 twoStepsLeftTextureCoordinate;
 varying vec2 threeStepsLeftTextureCoordinate;
 varying vec2 fourStepsLeftTextureCoordinate;
 varying vec2 oneStepRightTextureCoordinate;
 varying vec2 twoStepsRightTextureCoordinate;
 varying vec2 threeStepsRightTextureCoordinate;
 varying vec2 fourStepsRightTextureCoordinate;

 // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2)
 // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5
 void main()
 {
     vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026;

     fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667;
     fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667;

     fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074;
     fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074;

     fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612;
     fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612;

     fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143;
     fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143;

     gl_FragColor = fragmentColor;
 }
);
#endif
@implementation GPUImageLanczosResamplingFilter

@synthesize originalImageSize = _originalImageSize;

#pragma mark -
#pragma mark Initialization and teardown

// Both passes use the same Lanczos kernel; the axis each pass resamples along
// is selected at runtime via the texel offset uniforms in setupFilterForSize:.
- (id)init;
{
    if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageLanczosVertexShaderString firstStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString secondStageVertexShaderFromString:kGPUImageLanczosVertexShaderString secondStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString]))
    {
        return nil;
    }

    return self;
}

// Base texture sampling offset on the input image, not the final size
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    self.originalImageSize = newSize;
    [super setInputSize:newSize atIndex:textureIndex];
}

// Overrides the two-pass superclass behavior: the sampling step is derived
// from the ORIGINAL image size (captured in setInputSize:atIndex:) rather
// than the filter frame size, so the kernel reads source texels.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass
        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
        {
            verticalPassTexelWidthOffset = 1.0 / _originalImageSize.height;
            verticalPassTexelHeightOffset = 0.0;
        }
        else
        {
            verticalPassTexelWidthOffset = 0.0;
            verticalPassTexelHeightOffset = 1.0 / _originalImageSize.height;
        }

        horizontalPassTexelWidthOffset = 1.0 / _originalImageSize.width;
        horizontalPassTexelHeightOffset = 0.0;
    });
}

// Two-pass render: pass 1 downsamples along one axis into an intermediate
// framebuffer (kept at the original size on that axis), pass 2 downsamples
// along the other axis into the final-size framebuffer.
// NOTE(review): the GL calls below are strictly order-dependent
// (program activation, texture unit binding, uniform upload, draw).
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];

    // Only shrink along one axis in the first pass; the other axis keeps the
    // original image extent so the second pass still has full-resolution data.
    CGSize currentFBOSize = [self sizeOfFBO];
    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
    {
        currentFBOSize.height = self.originalImageSize.height;
    }
    else
    {
        currentFBOSize.width = self.originalImageSize.width;
    }

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:currentFBOSize textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    [self setUniformsForProgramAtIndex:0];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    // Source image goes on texture unit 2 for the first pass.
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);

    glUniform1i(filterInputTextureUniform, 2);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [firstInputFramebuffer unlock];

    // Run the second stage of the two-pass filter
    [GPUImageContext setActiveShaderProgram:secondFilterProgram];

    // Unbind units 2 and 3 before rebinding the intermediate result.
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, 0);

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, 0);

    secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [secondOutputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        // Extra lock so the framebuffer survives until the still image is read back.
        [secondOutputFramebuffer lock];
    }

    [self setUniformsForProgramAtIndex:1];

    // First-pass output feeds the second pass on texture unit 3.
    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

    // The first pass already applied any input rotation, so the second pass
    // samples with unrotated texture coordinates.
    glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);

    glUniform1i(secondFilterInputTextureUniform, 3);

    glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    // Release the intermediate framebuffer back to the cache.
    [outputFramebuffer unlock];
    outputFramebuffer = nil;

    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,176 |
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
@implementation GPUImageTwoPassTextureSamplingFilter

@synthesize verticalTexelSpacing = _verticalTexelSpacing;
@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;

#pragma mark -
#pragma mark Initialization and teardown

// Looks up the texel offset uniforms for both stage programs. Note the mapping:
// the FIRST stage (filterProgram) is the vertical pass and the SECOND stage
// (secondFilterProgram) is the horizontal pass.
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString
{
    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
    {
        return nil;
    }

    // Uniform lookups must happen with the image processing context current,
    // on the video processing queue.
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"];
        verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"];

        horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"];
        horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"];
    });

    // Defaults: sample at one-texel spacing in both directions. These setters
    // trigger setupFilterForSize: to populate the offset ivars.
    self.verticalTexelSpacing = 1.0;
    self.horizontalTexelSpacing = 1.0;

    return self;
}

// Uploads the per-pass texel offsets: program 0 is the vertical pass,
// program 1 the horizontal pass. Called with the matching program active.
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
    [super setUniformsForProgramAtIndex:programIndex];

    if (programIndex == 0)
    {
        glUniform1f(verticalPassTexelWidthOffsetUniform, verticalPassTexelWidthOffset);
        glUniform1f(verticalPassTexelHeightOffsetUniform, verticalPassTexelHeightOffset);
    }
    else
    {
        glUniform1f(horizontalPassTexelWidthOffsetUniform, horizontalPassTexelWidthOffset);
        glUniform1f(horizontalPassTexelHeightOffsetUniform, horizontalPassTexelHeightOffset);
    }
}

// Recomputes the sampling offsets (in normalized texture coordinates) from the
// texel spacings and the frame size.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass
        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
        {
            verticalPassTexelWidthOffset = _verticalTexelSpacing / filterFrameSize.height;
            verticalPassTexelHeightOffset = 0.0;
        }
        else
        {
            verticalPassTexelWidthOffset = 0.0;
            verticalPassTexelHeightOffset = _verticalTexelSpacing / filterFrameSize.height;
        }

        horizontalPassTexelWidthOffset = _horizontalTexelSpacing / filterFrameSize.width;
        horizontalPassTexelHeightOffset = 0.0;
    });
}

#pragma mark -
#pragma mark Accessors

// Changing a spacing re-derives all offsets for the current FBO size.
- (void)setVerticalTexelSpacing:(CGFloat)newValue;
{
    _verticalTexelSpacing = newValue;
    [self setupFilterForSize:[self sizeOfFBO]];
}

- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
{
    _horizontalTexelSpacing = newValue;
    [self setupFilterForSize:[self sizeOfFBO]];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 701 |
```objective-c
#import "GPUImageFilter.h"
/** Draws a set of crosshair markers at caller-supplied coordinates.
 Unlike most filters, output is produced by renderCrosshairsFromArray:count:frameTime:
 rather than by processing an input image.
 */
@interface GPUImageCrosshairGenerator : GPUImageFilter
{
    GLint crosshairWidthUniform, crosshairColorUniform;
}

// The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5.
@property(readwrite, nonatomic) CGFloat crosshairWidth;

// The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

// Rendering
/** Renders numberOfCrosshairs markers from the supplied coordinate array.
 @param crosshairCoordinates Flat array of crosshair positions (presumably
        x,y pairs in normalized coordinates — confirm against the implementation).
 @param numberOfCrosshairs Number of crosshairs described by the array.
 @param frameTime Timestamp propagated to downstream targets.
 */
- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 181 |
```objective-c
#import "GPUImagePixellateFilter.h"
/** A variant of GPUImagePixellateFilter that renders each sampled grid cell
 as a dot rather than a solid square.
 */
@interface GPUImagePolkaDotFilter : GPUImagePixellateFilter
{
    GLint dotScalingUniform;
}

// Controls the size of the dots, forwarded to the dotScaling shader uniform
// (presumably the fraction of each grid cell the dot covers — confirm
// against the fragment shader).
@property(readwrite, nonatomic) CGFloat dotScaling;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 46 |
```objective-c
#import "GPUImageFilter.h"
/**
 * Levels like Photoshop.
 *
 * The min, max, minOut and maxOut parameters are floats in the range [0, 1].
 * If you have parameters from Photoshop in the range [0, 255] you must first
 * convert them to be [0, 1].
 * The gamma/mid parameter is a float >= 0. This matches the value from Photoshop.
 *
 * If you want to apply levels to RGB as well as individual channels you need to use
 * this filter twice - first for the individual channels and then for all channels.
 */
@interface GPUImageLevelsFilter : GPUImageFilter
{
    GLint minUniform;
    GLint midUniform;
    GLint maxUniform;
    GLint minOutputUniform;
    GLint maxOutputUniform;

    // Per-channel (r, g, b) values backing the vec3 uniforms above; the
    // channel-specific setters update one component at a time.
    GPUVector3 minVector, midVector, maxVector, minOutputVector, maxOutputVector;
}

/** Set levels for the red channel */
- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;

/** Set levels for the red channel, leaving the output range at its current values */
- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

/** Set levels for the green channel */
- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;

/** Set levels for the green channel, leaving the output range at its current values */
- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

/** Set levels for the blue channel */
- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;

/** Set levels for the blue channel, leaving the output range at its current values */
- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

/** Set levels for all channels at once */
- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;

/** Set levels for all channels at once, leaving the output range at its current values */
- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 444 |
```objective-c
#import "GPUImageSobelEdgeDetectionFilter.h"
/** Sobel edge detection followed by a binary threshold on the edge magnitude. */
@interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
{
    GLint thresholdUniform;
}

/** Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default
 */
@property(readwrite, nonatomic) CGFloat threshold;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 85 |
```objective-c
#import "GPUImageChromaKeyBlendFilter.h"
// Shader code based on Apple's CIChromaKeyFilter example: path_to_url#samplecode/CIChromaKeyFilter/Introduction/Intro.html
// The key color and each pixel are converted to YCrCb; where the pixel's
// chrominance (Cr, Cb) is close to the key color's, the second input texture
// is blended in. smoothstep(thresholdSensitivity, thresholdSensitivity +
// smoothing, ...) gives a soft edge between keyed and unkeyed regions.
// Two variants: GL ES (with precision qualifiers) and desktop GL (without).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING
(
 precision highp float;

 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform float thresholdSensitivity;
 uniform float smoothing;
 uniform vec3 colorToReplace;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
     float maskCr = 0.7132 * (colorToReplace.r - maskY);
     float maskCb = 0.5647 * (colorToReplace.b - maskY);

     float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
     float Cr = 0.7132 * (textureColor.r - Y);
     float Cb = 0.5647 * (textureColor.b - Y);

     // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
     float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
     gl_FragColor = mix(textureColor, textureColor2, blendValue);
 }
);
#else
NSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform float thresholdSensitivity;
 uniform float smoothing;
 uniform vec3 colorToReplace;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
     float maskCr = 0.7132 * (colorToReplace.r - maskY);
     float maskCb = 0.5647 * (colorToReplace.b - maskY);

     float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
     float Cr = 0.7132 * (textureColor.r - Y);
     float Cb = 0.5647 * (textureColor.b - Y);

     // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
     float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
     gl_FragColor = mix(textureColor, textureColor2, blendValue);
 }
);
#endif
@implementation GPUImageChromaKeyBlendFilter

@synthesize thresholdSensitivity = _thresholdSensitivity;
@synthesize smoothing = _smoothing;

// Compiles the chroma-key blend program and applies the default key settings:
// a green key color, 0.4 sensitivity, and 0.1 smoothing.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageChromaKeyBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    thresholdSensitivityUniform = [filterProgram uniformIndex:@"thresholdSensitivity"];
    smoothingUniform = [filterProgram uniformIndex:@"smoothing"];
    colorToReplaceUniform = [filterProgram uniformIndex:@"colorToReplace"];

    self.thresholdSensitivity = 0.4;
    self.smoothing = 0.1;
    [self setColorToReplaceRed:0.0 green:1.0 blue:0.0];

    return self;
}

#pragma mark -
#pragma mark Accessors

// Uploads the key color to the shader. The value is not stored on the filter;
// it lives only in the program's uniform state.
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
{
    GPUVector3 keyColor = {redComponent, greenComponent, blueComponent};

    [self setVec3:keyColor forUniform:colorToReplaceUniform program:filterProgram];
}

// Distance in (Cr, Cb) space below which a pixel is considered keyed.
- (void)setThresholdSensitivity:(CGFloat)sensitivity;
{
    _thresholdSensitivity = sensitivity;

    [self setFloat:(GLfloat)_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram];
}

// Width of the soft transition band beyond the sensitivity threshold.
- (void)setSmoothing:(CGFloat)smoothing;
{
    _smoothing = smoothing;

    [self setFloat:(GLfloat)_smoothing forUniform:smoothingUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,104 |
```objective-c
#import "GPUImageVideoCamera.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"
// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)

// BT.601, which is the standard for SDTV.
// Consumed as the colorConversionMatrix mat3 uniform by the YUV conversion
// shaders below (layout is presumably column-major as GLSL expects — confirm
// at the glUniformMatrix upload site).
const GLfloat kColorConversion601[] = {
    1.164,  1.164, 1.164,
    0.0, -0.392, 2.017,
    1.596, -0.813,   0.0,
};

// BT.709, which is the standard for HDTV.
const GLfloat kColorConversion709[] = {
    1.164,  1.164, 1.164,
    0.0, -0.213, 2.112,
    1.793, -0.533,   0.0,
};

// BT.601 full range (ref: path_to_url
// Full-range variant: luma scale is 1.0 (no 16-235 expansion needed).
const GLfloat kColorConversion601FullRange[] = {
    1.0,    1.0,    1.0,
    0.0,    -0.343, 1.765,
    1.4,    -0.711, 0.0,
};
// YUV (biplanar) to RGB conversion shaders. The luma plane is sampled from
// luminanceTexture and the two-channel chroma plane from chrominanceTexture;
// the channel swizzle differs by texture format: .rg for red/green textures,
// .ra for luminance/alpha textures.
// NOTE(review): unlike the video-range LA variant below, this "VideoRange"
// RG variant does not subtract the 16/255 luma offset — confirm whether the
// offset is folded into the conversion matrix used with it.
NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);

// Full-range variant (luminance/alpha chroma texture): chroma is re-centered
// around zero, luma is used as-is.
NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);

// Video-range variant (luminance/alpha chroma texture): additionally removes
// the 16/255 black-level offset from the luma channel.
NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);
#pragma mark -
#pragma mark Private methods and instance variables

@interface GPUImageVideoCamera ()
{
    AVCaptureDeviceInput *audioInput;
    AVCaptureAudioDataOutput *audioOutput;
    NSDate *startingCaptureTime;

    // Global queues used for the sample buffer delegate callbacks
    // (high priority for video, low priority for audio).
    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;

    // Program and cached attribute/uniform locations for the on-GPU
    // YUV -> RGB conversion pass.
    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    // Points at one of the kColorConversion* matrices above.
    const GLfloat *_preferredConversion;

    // YES when the capture output delivers full-range (0-255) YUV.
    BOOL isFullYUVRange;

    int imageBufferWidth, imageBufferHeight;

    // Tracks whether audio I/O was attached implicitly for a movie-writer
    // encoding target (so it can be torn down again with it).
    BOOL addedAudioInputsDueToEncodingTarget;
}

- (void)updateOrientationSendToTargets;
- (void)convertYUVToRGBOutput;

@end
@implementation GPUImageVideoCamera
@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
@synthesize outputImageOrientation = _outputImageOrientation;
@synthesize delegate = _delegate;
@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;
@synthesize frameRate = _frameRate;
#pragma mark -
#pragma mark Initialization and teardown
// Default configuration: 640x480 session preset, back-facing camera.
- (id)init;
{
    return [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
}
// Designated initializer. Builds the AVCaptureSession, selects the camera at
// the requested position (returns nil if none exists), configures the video
// output pixel format — biplanar YUV when fast texture upload is available,
// BGRA otherwise — and compiles the matching YUV->RGB conversion program on
// the video processing queue. Everything between beginConfiguration and
// commitConfiguration is applied to the session atomically.
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    // Frame processing runs at high priority, audio at low priority.
    cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
    audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);

    // Allows at most one frame at a time through the processing pipeline.
    frameRenderingSemaphore = dispatch_semaphore_create(1);

    _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
    _runBenchmark = NO;
    capturePaused = NO;
    outputRotation = kGPUImageNoRotation;
    internalRotation = kGPUImageNoRotation;
    captureAsYUV = YES;
    _preferredConversion = kColorConversion709;

    // Grab the back-facing or front-facing camera
    _inputCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == cameraPosition)
        {
            _inputCamera = device;
        }
    }

    // No camera at the requested position: initialization fails.
    if (!_inputCamera) {
        return nil;
    }

    // Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];

    [_captureSession beginConfiguration];

    // Add the video input
    NSError *error = nil;
    videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
    if ([_captureSession canAddInput:videoInput])
    {
        [_captureSession addInput:videoInput];
    }

    // Add the video frame output
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setAlwaysDiscardsLateVideoFrames:NO];

    // Prefer biplanar YUV capture (full range when the device offers it)
    // so the GPU can do the color conversion; otherwise fall back to BGRA.
//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
    {
        BOOL supportsFullYUVRange = NO;
        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
        for (NSNumber *currentPixelFormat in supportedPixelFormats)
        {
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            {
                supportsFullYUVRange = YES;
            }
        }

        if (supportsFullYUVRange)
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = YES;
        }
        else
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = NO;
        }
    }
    else
    {
        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    }

    // Compile and link the conversion program matching the chosen YUV range;
    // must happen on the video processing queue with the GL context current.
    runSynchronouslyOnVideoProcessingQueue(^{

        if (captureAsYUV)
        {
            [GPUImageContext useImageProcessingContext];
            //            if ([GPUImageContext deviceSupportsRedTextures])
            //            {
            //                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
            //            }
            //            else
            //            {
            if (isFullYUVRange)
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
            }
            else
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
            }
            //            }

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            // Cache attribute/uniform locations for the per-frame conversion pass.
            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        }
    });

    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
    if ([_captureSession canAddOutput:videoOutput])
    {
        [_captureSession addOutput:videoOutput];
    }
    else
    {
        NSLog(@"Couldn't add video output");
        return nil;
    }

    _captureSessionPreset = sessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];

// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset
//    AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
//
//    if (conn.supportsVideoMinFrameDuration)
//        conn.videoMinFrameDuration = CMTimeMake(1,60);
//    if (conn.supportsVideoMaxFrameDuration)
//        conn.videoMaxFrameDuration = CMTimeMake(1,60);

    [_captureSession commitConfiguration];

    return self;
}
// Returns the camera's most recent output framebuffer. May be nil between
// frames: updateTargetsForVideoCameraUsingCacheTextureAtWidth:height:time:
// unlocks and clears it once all targets have been handed the frame.
- (GPUImageFramebuffer *)framebufferForOutput;
{
return outputFramebuffer;
}
// Teardown: stop the session, detach both sample-buffer delegates (a queue
// must still be supplied when clearing a delegate, hence the main queue),
// and strip all inputs/outputs from the capture session.
- (void)dealloc
{
[self stopCameraCapture];
[videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
[audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
[self removeInputsAndOutputs];
// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
#if !OS_OBJECT_USE_OBJC
if (frameRenderingSemaphore != NULL)
{
dispatch_release(frameRenderingSemaphore);
}
#endif
}
// Lazily attaches a microphone input and an audio data output to the capture
// session. Returns NO when audio I/O is already configured; otherwise returns
// YES after configuring (matching the original contract: individual add
// failures are logged, not reported through the return value).
- (BOOL)addAudioInputsAndOutputs
{
    if (audioOutput)
        return NO;

    [_captureSession beginConfiguration];

    _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    // Capture and report the creation error instead of discarding it with
    // error:nil; a nil input would previously be added silently as a no-op.
    NSError *error = nil;
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:&error];
    if (audioInput == nil)
    {
        NSLog(@"Couldn't create audio input: %@", error);
    }
    else if ([_captureSession canAddInput:audioInput])
    {
        [_captureSession addInput:audioInput];
    }
    else
    {
        NSLog(@"Couldn't add audio input");
    }

    audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    if ([_captureSession canAddOutput:audioOutput])
    {
        [_captureSession addOutput:audioOutput];
    }
    else
    {
        NSLog(@"Couldn't add audio output");
    }
    [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];

    [_captureSession commitConfiguration];
    return YES;
}
// Detaches the microphone input/output previously added by
// addAudioInputsAndOutputs. Returns NO when no audio output is attached,
// YES once everything has been removed and the references cleared.
- (BOOL)removeAudioInputsAndOutputs
{
if (!audioOutput)
return NO;
[_captureSession beginConfiguration];
[_captureSession removeInput:audioInput];
[_captureSession removeOutput:audioOutput];
audioInput = nil;
audioOutput = nil;
_microphone = nil;
[_captureSession commitConfiguration];
return YES;
}
// Strips both the video and (if present) audio inputs/outputs from the
// capture session inside a single begin/commitConfiguration transaction.
// Used by dealloc; safe to call when only some of the I/O was ever attached.
- (void)removeInputsAndOutputs;
{
[_captureSession beginConfiguration];
if (videoInput) {
[_captureSession removeInput:videoInput];
[_captureSession removeOutput:videoOutput];
videoInput = nil;
videoOutput = nil;
}
// A non-nil _microphone implies the audio input/output pair was attached.
if (_microphone != nil)
{
[_captureSession removeInput:audioInput];
[_captureSession removeOutput:audioOutput];
audioInput = nil;
audioOutput = nil;
_microphone = nil;
}
[_captureSession commitConfiguration];
}
#pragma mark -
#pragma mark Managing targets
// In addition to the superclass wiring, a newly added target immediately
// receives the camera's current output rotation so its first frame is
// oriented correctly.
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
[super addTarget:newTarget atTextureLocation:textureLocation];
[newTarget setInputRotation:outputRotation atIndex:textureLocation];
}
#pragma mark -
#pragma mark Manage the camera video stream
// Starts the AVCaptureSession if it is not already running, recording the
// start time so captured frames can be benchmarked/timestamped against it.
- (void)startCameraCapture;
{
    if ([_captureSession isRunning])
    {
        return;
    }

    startingCaptureTime = [NSDate date];
    [_captureSession startRunning];
}
// Stops the AVCaptureSession if it is currently running; a no-op otherwise.
- (void)stopCameraCapture;
{
    if (![_captureSession isRunning])
    {
        return;
    }

    [_captureSession stopRunning];
}
// Pauses frame processing without stopping the session: while capturePaused
// is YES, processVideoSampleBuffer: drops incoming frames on entry.
- (void)pauseCameraCapture;
{
capturePaused = YES;
}
// Resumes frame processing after pauseCameraCapture.
- (void)resumeCameraCapture;
{
capturePaused = NO;
}
// Switches capture between the front- and back-facing cameras, keeping the
// session configured and re-applying the output image orientation afterward.
// Does nothing on single-camera devices.
- (void)rotateCamera
{
    if (self.frontFacingCameraPresent == NO)
        return;

    // Determine which position we are switching *to*.
    AVCaptureDevicePosition targetPosition =
        ([[videoInput device] position] == AVCaptureDevicePositionBack)
            ? AVCaptureDevicePositionFront
            : AVCaptureDevicePositionBack;

    // Locate a video device at the desired position.
    AVCaptureDevice *targetDevice = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([device position] == targetPosition)
        {
            targetDevice = device;
        }
    }

    NSError *error;
    AVCaptureDeviceInput *replacementInput = [[AVCaptureDeviceInput alloc] initWithDevice:targetDevice error:&error];

    if (replacementInput != nil)
    {
        [_captureSession beginConfiguration];

        [_captureSession removeInput:videoInput];
        if ([_captureSession canAddInput:replacementInput])
        {
            [_captureSession addInput:replacementInput];
            videoInput = replacementInput;
        }
        else
        {
            // Couldn't use the replacement; restore the previous input so
            // capture keeps working.
            [_captureSession addInput:videoInput];
        }
        //captureSession.sessionPreset = oriPreset;
        [_captureSession commitConfiguration];
    }

    _inputCamera = targetDevice;
    [self setOutputImageOrientation:_outputImageOrientation];
}
// Position (front/back) of the device currently feeding the video input.
- (AVCaptureDevicePosition)cameraPosition
{
return [[videoInput device] position];
}
// YES when any attached video device is back-facing.
+ (BOOL)isBackFacingCameraPresent;
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([device position] == AVCaptureDevicePositionBack)
        {
            return YES;
        }
    }

    return NO;
}
// Instance-level convenience forwarding to the class method above.
- (BOOL)isBackFacingCameraPresent
{
return [GPUImageVideoCamera isBackFacingCameraPresent];
}
// YES when any attached video device is front-facing.
+ (BOOL)isFrontFacingCameraPresent;
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([device position] == AVCaptureDevicePositionFront)
        {
            return YES;
        }
    }

    return NO;
}
// Instance-level convenience forwarding to the class method above.
- (BOOL)isFrontFacingCameraPresent
{
return [GPUImageVideoCamera isFrontFacingCameraPresent];
}
// Applies a new session preset (e.g. AVCaptureSessionPreset640x480) inside a
// configuration transaction so the change takes effect atomically.
- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;
{
[_captureSession beginConfiguration];
_captureSessionPreset = captureSessionPreset;
[_captureSession setSessionPreset:_captureSessionPreset];
[_captureSession commitConfiguration];
}
// Sets the capture frame rate. A frameRate <= 0 restores the device/connection
// defaults (kCMTimeInvalid). Devices supporting the iOS 7 active frame
// duration API use it; older systems fall back to the deprecated
// per-connection min/max frame duration properties.
- (void)setFrameRate:(int32_t)frameRate;
{
    _frameRate = frameRate;

    // CMTimeMake(1, fps) pins the frame duration; kCMTimeInvalid restores the
    // default. Computing it once removes the duplicated >0 / <=0 branches.
    CMTime frameDuration = (_frameRate > 0) ? CMTimeMake(1, _frameRate) : kCMTimeInvalid;

    if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
        [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {

        NSError *error;
        // Check lockForConfiguration:'s BOOL return, not the error pointer —
        // the error object may be left untouched on success, and calling
        // unlockForConfiguration without a successful lock is invalid.
        if ([_inputCamera lockForConfiguration:&error]) {
#if defined(__IPHONE_7_0)
            [_inputCamera setActiveVideoMinFrameDuration:frameDuration];
            [_inputCamera setActiveVideoMaxFrameDuration:frameDuration];
#endif
            [_inputCamera unlockForConfiguration];
        } else {
            NSLog(@"Couldn't lock camera for frame rate configuration: %@", error);
        }
    } else {
        // Pre-iOS 7 fallback: configure each video connection directly.
        for (AVCaptureConnection *connection in videoOutput.connections)
        {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                connection.videoMinFrameDuration = frameDuration;
            if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
                connection.videoMaxFrameDuration = frameDuration;
#pragma clang diagnostic pop
        }
    }
}
// Last frame rate requested via setFrameRate: (0 means device default).
- (int32_t)frameRate;
{
return _frameRate;
}
// Finds the session's video connection by scanning each of the video output's
// connections for an input port carrying video media. Returns nil when none
// exists (e.g. before the session is configured).
- (AVCaptureConnection *)videoCaptureConnection {
    for (AVCaptureConnection *candidate in [videoOutput connections]) {
        for (AVCaptureInputPort *inputPort in [candidate inputPorts]) {
            if ([[inputPort mediaType] isEqual:AVMediaTypeVideo]) {
                return candidate;
            }
        }
    }

    return nil;
}
#define INITIALFRAMESTOIGNOREFORBENCHMARK 5
// Distributes the freshly rendered camera framebuffer to every enabled target.
// Two passes: first every target receives rotation, size, and the framebuffer
// (so each takes its own lock on the framebuffer before this camera drops its
// hold), then rendering is triggered via newFrameReadyAtTime:atIndex:.
// targetToIgnoreForUpdates still receives the data but is not told to render.
- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
{
// First, update all the framebuffers in the targets
for (id<GPUImageInput> currentTarget in targets)
{
if ([currentTarget enabled])
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
if (currentTarget != self.targetToIgnoreForUpdates)
{
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
// Both branches currently hand over the same framebuffer; the flag only
// tells the target whether it may treat the input as monochrome.
if ([currentTarget wantsMonochromeInput] && captureAsYUV)
{
[currentTarget setCurrentlyReceivingMonochromeInput:YES];
// TODO: Replace optimization for monochrome output
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
}
else
{
[currentTarget setCurrentlyReceivingMonochromeInput:NO];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
}
}
else
{
// Ignored target: still receives rotation and data, but no render trigger below.
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
}
}
}
// Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed
[outputFramebuffer unlock];
outputFramebuffer = nil;
// Finally, trigger rendering as needed
for (id<GPUImageInput> currentTarget in targets)
{
if ([currentTarget enabled])
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
if (currentTarget != self.targetToIgnoreForUpdates)
{
[currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
}
}
}
}
// Core per-frame video path (runs on the video processing queue). Converts
// the incoming sample buffer's pixels into a GPU framebuffer and hands it to
// all targets. Two upload strategies:
//   * fast path: planar YUV buffers mapped directly into GL textures through
//     the Core Video texture cache, then converted to RGB on the GPU
//     (convertYUVToRGBOutput);
//   * slow path: BGRA bytes uploaded with glTexImage2D.
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
if (capturePaused)
{
return;
}
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
// Pick the YUV->RGB conversion matrix from the buffer's colorimetry
// attachment; buffers without one are treated as BT.601.
CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
if (colorAttachments != NULL)
{
if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
else
{
_preferredConversion = kColorConversion709;
}
}
else
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
[GPUImageContext useImageProcessingContext];
if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
{
CVOpenGLESTextureRef luminanceTextureRef = NULL;
CVOpenGLESTextureRef chrominanceTextureRef = NULL;
// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
{
CVPixelBufferLockBaseAddress(cameraFrame, 0);
// NOTE(review): '&&' only refreshes the cached size when BOTH dimensions
// differ; a change in a single dimension is missed — confirm whether '||'
// was intended here.
if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
{
imageBufferWidth = bufferWidth;
imageBufferHeight = bufferHeight;
}
CVReturn err;
// Y-plane
// Bound on texture unit 4 to match convertYUVToRGBOutput's uniform setup.
glActiveTexture(GL_TEXTURE4);
if ([GPUImageContext deviceSupportsRedTextures])
{
// err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// UV-plane
// The chroma plane is half-size (bufferWidth/2 x bufferHeight/2); unit 5.
glActiveTexture(GL_TEXTURE5);
if ([GPUImageContext deviceSupportsRedTextures])
{
// err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// if (!allTargetsWantMonochromeData)
// {
[self convertYUVToRGBOutput];
// }
// Report the post-rotation dimensions to targets.
int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
{
rotatedImageBufferWidth = bufferHeight;
rotatedImageBufferHeight = bufferWidth;
}
[self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
// Release the texture-cache references now that the conversion pass has
// rendered into outputFramebuffer.
CFRelease(luminanceTextureRef);
CFRelease(chrominanceTextureRef);
}
else
{
// Non-planar buffers on the fast path currently fall through to this empty
// branch (nothing is rendered for them); the direct texture-cache upload
// code is preserved below, commented out.
// TODO: Mesh this with the output framebuffer structure
// CVPixelBufferLockBaseAddress(cameraFrame, 0);
//
// CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
//
// if (!texture || err) {
// NSLog(@"Camera CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
// NSAssert(NO, @"Camera failure");
// return;
// }
//
// outputTexture = CVOpenGLESTextureGetName(texture);
// // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
// glBindTexture(GL_TEXTURE_2D, outputTexture);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//
// [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime];
//
// CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
// CFRelease(texture);
//
// outputTexture = 0;
}
if (_runBenchmark)
{
numberOfFramesCaptured++;
if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
totalFrameTimeDuringCapture += currentFrameTime;
NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
}
}
else
{
// Slow path: copy BGRA bytes into a texture with glTexImage2D.
CVPixelBufferLockBaseAddress(cameraFrame, 0);
int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
[outputFramebuffer activateFramebuffer];
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
// glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
// Using BGRA extension to pull in video frame data directly
// The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
[self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
if (_runBenchmark)
{
numberOfFramesCaptured++;
if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
totalFrameTimeDuringCapture += currentFrameTime;
}
}
}
}
// Forwards captured audio straight to the movie writer; messaging a nil
// audioEncodingTarget is a no-op.
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
[self.audioEncodingTarget processAudioBuffer:sampleBuffer];
}
// Renders the luminance/chrominance textures (filled in by
// processVideoSampleBuffer:) into a single RGB framebuffer using
// yuvConversionProgram and the _preferredConversion matrix, applying
// internalRotation through the texture coordinates.
- (void)convertYUVToRGBOutput;
{
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
// Output framebuffer dimensions follow the rotated orientation.
int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;
if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
{
rotatedImageBufferWidth = imageBufferHeight;
rotatedImageBufferHeight = imageBufferWidth;
}
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Full-screen quad drawn as a triangle strip.
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
// Texture units 4 and 5 match those used when the Y/UV textures were created
// in processVideoSampleBuffer:.
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glUniform1i(yuvConversionLuminanceTextureUniform, 4);
glActiveTexture(GL_TEXTURE5);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glUniform1i(yuvConversionChrominanceTextureUniform, 5);
glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
#pragma mark -
#pragma mark Benchmarking
// Average per-frame processing time in milliseconds over the benchmarked
// frames (the first INITIALFRAMESTOIGNOREFORBENCHMARK frames are excluded as
// warm-up). Returns 0.0 before any benchmarked frame has completed instead of
// dividing by a non-positive count (which previously produced inf/NaN).
- (CGFloat)averageFrameDurationDuringCapture;
{
    NSInteger benchmarkedFrames = numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK;
    if (benchmarkedFrames <= 0)
    {
        return 0.0;
    }

    return (totalFrameTimeDuringCapture / (CGFloat)benchmarkedFrames) * 1000.0;
}
// Clears the counters used by averageFrameDurationDuringCapture, restarting
// the benchmark (including the initial warm-up frames to ignore).
- (void)resetBenchmarkAverage;
{
numberOfFramesCaptured = 0;
totalFrameTimeDuringCapture = 0.0;
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
// AVCapture delegate entry point for both the audio and video outputs.
// Video frames are dropped (rather than queued) when the previous frame is
// still being processed, using frameRenderingSemaphore as a non-blocking gate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if (!self.captureSession.isRunning)
{
return;
}
else if (captureOutput == audioOutput)
{
[self processAudioSampleBuffer:sampleBuffer];
}
else
{
// DISPATCH_TIME_NOW makes this a try-wait: if a frame is already in flight,
// skip this one instead of blocking the capture queue.
if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}
// Keep the buffer alive across the async hop; balanced by CFRelease below.
CFRetain(sampleBuffer);
runAsynchronouslyOnVideoProcessingQueue(^{
//Feature Detection Hook.
if (self.delegate)
{
[self.delegate willOutputSampleBuffer:sampleBuffer];
}
[self processVideoSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
dispatch_semaphore_signal(frameRenderingSemaphore);
});
}
}
#pragma mark -
#pragma mark Accessors
// Keeps microphone capture in sync with the movie writer: setting a target
// lazily adds audio I/O; clearing it removes the audio I/O only if it was
// added here (the |= keeps the flag set across repeated target assignments,
// since addAudioInputsAndOutputs returns NO when already configured).
- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
if (newValue) {
/* Add audio inputs and outputs, if necessary */
addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs];
} else if (addedAudioInputsDueToEncodingTarget) {
/* Remove audio inputs and outputs, if they were added by previously setting the audio encoding target */
[self removeAudioInputsAndOutputs];
addedAudioInputsDueToEncodingTarget = NO;
}
[super setAudioEncodingTarget:newValue];
}
// Recomputes the rotation targets must apply, based on camera position,
// interface orientation, and the mirroring flags, then pushes it to every
// target. On the fast YUV path the rotation is burned into the RGB conversion
// pass (internalRotation) and targets get kGPUImageNoRotation; otherwise the
// rotation is delivered to targets directly (outputRotation).
- (void)updateOrientationSendToTargets;
{
runSynchronouslyOnVideoProcessingQueue(^{
// From the iOS 5.0 release notes:
// In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight.
if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
{
// Fast path: rotation handled inside convertYUVToRGBOutput.
outputRotation = kGPUImageNoRotation;
if ([self cameraPosition] == AVCaptureDevicePositionBack)
{
if (_horizontallyMirrorRearFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
default:internalRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageNoRotation; break;
default:internalRotation = kGPUImageNoRotation;
}
}
}
else
{
// Front-facing camera on the fast path.
if (_horizontallyMirrorFrontFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateRightFlipHorizontal; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
default:internalRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageNoRotation; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageRotate180; break;
default:internalRotation = kGPUImageNoRotation;
}
}
}
}
else
{
// Slow path: same tables, but the rotation is applied by the targets.
if ([self cameraPosition] == AVCaptureDevicePositionBack)
{
if (_horizontallyMirrorRearFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
default:outputRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageNoRotation; break;
default:outputRotation = kGPUImageNoRotation;
}
}
}
else
{
if (_horizontallyMirrorFrontFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateRightFlipHorizontal; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
default:outputRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageNoRotation; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageRotate180; break;
default:outputRotation = kGPUImageNoRotation;
}
}
}
}
// Broadcast the (possibly unchanged) rotation to every existing target.
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
[currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
}
});
}
// Each of the three setters below stores its value and immediately
// recomputes/broadcasts the rotation via updateOrientationSendToTargets.
- (void)setOutputImageOrientation:(UIInterfaceOrientation)newValue;
{
_outputImageOrientation = newValue;
[self updateOrientationSendToTargets];
}
- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue
{
_horizontallyMirrorFrontFacingCamera = newValue;
[self updateOrientationSendToTargets];
}
- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue
{
_horizontallyMirrorRearFacingCamera = newValue;
[self updateOrientationSendToTargets];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageVideoCamera.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 8,924 |
```objective-c
#import "GPUImageSolidColorGenerator.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (precision qualifiers required). Output is the uniform
// color; alpha is the input texture's alpha when useExistingAlpha is 1.0,
// otherwise the max() term forces it to 1.0.
NSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING
(
precision lowp float;
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec4 color;
uniform float useExistingAlpha;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha));
}
);
#else
// Desktop OpenGL variant: identical logic, without precision qualifiers.
NSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec4 color;
uniform float useExistingAlpha;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha));
}
);
#endif
// Generates frames filled with a single solid color. In this implementation
// the color is applied by clearing the output framebuffer (see
// renderToTextureWithVertices:), which reads _color directly.
@implementation GPUImageSolidColorGenerator
@synthesize color = _color;
// Looks up the shader uniform handles and sets the default color
// (opaque 50% blue) and alpha behavior.
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUSolidColorFragmentShaderString]))
{
return nil;
}
colorUniform = [filterProgram uniformIndex:@"color"];
useExistingAlphaUniform = [filterProgram uniformIndex:@"useExistingAlpha"];
_color = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f};
self.useExistingAlpha = NO;
return self;
}
// Fills the output framebuffer with _color via glClearColor/glClear. The
// vertices/textureCoordinates arguments are unused here; the fragment shader
// is not exercised by this path.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
if (self.preventRendering)
{
return;
}
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext setActiveShaderProgram:filterProgram];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
glClearColor(_color.one, _color.two, _color.three, _color.four);
glClear(GL_COLOR_BUFFER_BIT);
});
}
#pragma mark -
#pragma mark Accessors
// Re-renders immediately once a concrete output size is known.
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
[super forceProcessingAtSize:frameSize];
if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero))
{
[self newFrameReadyAtTime:kCMTimeIndefinite atIndex:0];
}
}
// New targets receive a frame right away, so the solid color shows up
// without waiting for another render trigger.
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
[super addTarget:newTarget atTextureLocation:textureLocation];
if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero))
{
[newTarget setInputSize:inputTextureSize atIndex:textureLocation];
[newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation];
}
}
- (void)setColor:(GPUVector4)newValue;
{
[self setColorRed:newValue.one green:newValue.two blue:newValue.three alpha:newValue.four];
}
// Stores the components and schedules a re-render. The color reaches the GPU
// through glClearColor during rendering (the uniform upload is commented out).
- (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;
{
_color.one = (GLfloat)redComponent;
_color.two = (GLfloat)greenComponent;
_color.three = (GLfloat)blueComponent;
_color.four = (GLfloat)alphaComponent;
// [self setVec4:_color forUniform:colorUniform program:filterProgram];
runAsynchronouslyOnVideoProcessingQueue(^{
[self newFrameReadyAtTime:kCMTimeIndefinite atIndex:0];
});
}
// Pushes the flag to the shader as a 0/1 float-compatible uniform.
- (void)setUseExistingAlpha:(BOOL)useExistingAlpha;
{
_useExistingAlpha = useExistingAlpha;
[self setInteger:(useExistingAlpha ? 1 : 0) forUniform:useExistingAlphaUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 847 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Vertex shader routing texture coordinates for three inputs (defined in the .m).
extern NSString *const kGPUImageThreeInputTextureVertexShaderString;
// Extends the two-input filter with plumbing for a third input texture:
// its framebuffer, attribute/uniform handles, rotation mode, texture name,
// frame timestamp, and the synchronization flags used to decide when all
// inputs have arrived.
@interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
{
GPUImageFramebuffer *thirdInputFramebuffer;
GLint filterThirdTextureCoordinateAttribute;
GLint filterInputTextureUniform3;
GPUImageRotationMode inputRotation3;
GLuint filterSourceTexture3;
CMTime thirdFrameTime;
BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
BOOL thirdFrameCheckDisabled;
}
// Sets thirdFrameCheckDisabled; exact rendering semantics live in the .m.
- (void)disableThirdFrameCheck;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 123 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Subtract-blend filter for two inputs; all plumbing is inherited from
// GPUImageTwoInputFilter and the blend shader is defined in the .m.
@interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 26 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageSaturationFilter;
@class GPUImageGaussianBlurFilter;
@class GPUImageLuminanceRangeFilter;
// Filter group approximating the iOS 7-style background blur, built from the
// saturation, Gaussian-blur, and luminance-range sub-filters below; the
// properties tune each stage.
@interface GPUImageiOSBlurFilter : GPUImageFilterGroup
{
GPUImageSaturationFilter *saturationFilter;
GPUImageGaussianBlurFilter *blurFilter;
GPUImageLuminanceRangeFilter *luminanceRangeFilter;
}
/** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.
*/
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level
*/
@property (readwrite, nonatomic) CGFloat saturation;
/** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0
*/
@property (readwrite, nonatomic) CGFloat downsampling;
/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
*/
@property (readwrite, nonatomic) CGFloat rangeReductionFactor;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 259 |
```objective-c
#import "GPUImageSmoothToonFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageToonFilter.h"
@implementation GPUImageSmoothToonFilter

@synthesize threshold;
@synthesize blurRadiusInPixels;
@synthesize quantizationLevels;
@synthesize texelWidth;
@synthesize texelHeight;

// Two-stage cartoon effect: a Gaussian blur smooths the source before a toon
// filter applies Sobel edge detection and color quantization to it.
- (id)init;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Stage 1: variable-radius Gaussian blur.
    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
    [self addFilter:blurFilter];

    // Stage 2: Sobel edge detection plus posterization, fed by the blur.
    toonFilter = [[GPUImageToonFilter alloc] init];
    [self addFilter:toonFilter];

    [blurFilter addTarget:toonFilter];

    self.initialFilters = [NSArray arrayWithObject:blurFilter];
    self.terminalFilter = toonFilter;

    // Defaults: gentle blur, moderate edge threshold, ten color levels.
    self.blurRadiusInPixels = 2.0;
    self.threshold = 0.2;
    self.quantizationLevels = 10.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

// Every property below is a thin pass-through to the relevant sub-filter.

- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
    [blurFilter setBlurRadiusInPixels:newValue];
}

- (CGFloat)blurRadiusInPixels;
{
    return [blurFilter blurRadiusInPixels];
}

- (void)setTexelWidth:(CGFloat)newValue;
{
    [toonFilter setTexelWidth:newValue];
}

- (CGFloat)texelWidth;
{
    return [toonFilter texelWidth];
}

- (void)setTexelHeight:(CGFloat)newValue;
{
    [toonFilter setTexelHeight:newValue];
}

- (CGFloat)texelHeight;
{
    return [toonFilter texelHeight];
}

- (void)setThreshold:(CGFloat)newValue;
{
    [toonFilter setThreshold:newValue];
}

- (CGFloat)threshold;
{
    return [toonFilter threshold];
}

- (void)setQuantizationLevels:(CGFloat)newValue;
{
    [toonFilter setQuantizationLevels:newValue];
}

- (CGFloat)quantizationLevels;
{
    return [toonFilter quantizationLevels];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 485 |
```objective-c
#import "GPUImageAverageLuminanceThresholdFilter.h"
#import "GPUImageLuminosity.h"
#import "GPUImageLuminanceThresholdFilter.h"
@interface GPUImageAverageLuminanceThresholdFilter()
{
    GPUImageLuminosity *luminosityFilter;
    GPUImageLuminanceThresholdFilter *luminanceThresholdFilter;
}
@end

@implementation GPUImageAverageLuminanceThresholdFilter

@synthesize thresholdMultiplier = _thresholdMultiplier;

#pragma mark -
#pragma mark Initialization and teardown

// Thresholds each frame against its own average luminance: a luminosity
// reduction pass computes the frame's mean brightness, and a callback feeds
// (mean * thresholdMultiplier) into a standard luminance threshold filter.
- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    self.thresholdMultiplier = 1.0;
    
    luminosityFilter = [[GPUImageLuminosity alloc] init];
    [self addFilter:luminosityFilter];
    
    luminanceThresholdFilter = [[GPUImageLuminanceThresholdFilter alloc] init];
    [self addFilter:luminanceThresholdFilter];
    
    // Use __weak rather than __unsafe_unretained: if this filter group is
    // deallocated while a luminosity callback is still in flight on the video
    // processing queue, weak references nil out (messaging nil is a no-op)
    // instead of leaving dangling pointers that would crash.
    __weak GPUImageAverageLuminanceThresholdFilter *weakSelf = self;
    __weak GPUImageLuminanceThresholdFilter *weakThreshold = luminanceThresholdFilter;
    
    [luminosityFilter setLuminosityProcessingFinishedBlock:^(CGFloat luminosity, CMTime frameTime) {
        weakThreshold.threshold = luminosity * weakSelf.thresholdMultiplier;
    }];
    
    // Both filters receive the source frame directly; only the threshold
    // filter produces the group's output.
    self.initialFilters = [NSArray arrayWithObjects:luminosityFilter, luminanceThresholdFilter, nil];
    self.terminalFilter = luminanceThresholdFilter;
    
    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 313 |
```objective-c
#import "GPUImageFilter.h"
#import "GPUImagePicture.h"
#import <AVFoundation/AVFoundation.h>
// Hardcode the vertex shader for standard filters, but this can be overridden
NSString *const kGPUImageVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 varying vec2 textureCoordinate;
 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
 }
);

// Pass-through fragment shader: samples the input texture unmodified.
// OpenGL ES requires a precision qualifier on the varying, desktop GLSL
// does not, hence the two per-platform variants below.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
 }
);
#else
NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
 }
);
#endif
@implementation GPUImageFilter

@synthesize preventRendering = _preventRendering;
@synthesize currentlyReceivingMonochromeInput;

#pragma mark -
#pragma mark Initialization and teardown

// Designated initializer: obtains a (possibly cached) GL program for the
// given vertex/fragment shader pair on the video processing queue, links it
// if needed, and caches the attribute and uniform handles used for rendering.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    uniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];
    _preventRendering = NO;
    currentlyReceivingMonochromeInput = NO;
    inputRotation = kGPUImageNoRotation;
    backgroundColorRed = 0.0;
    backgroundColorGreen = 0.0;
    backgroundColorBlue = 0.0;
    backgroundColorAlpha = 0.0;
    // Binary semaphore gating still-image capture: created at 0, then
    // immediately signaled so its initial value is 1 (available).
    imageCaptureSemaphore = dispatch_semaphore_create(0);
    dispatch_semaphore_signal(imageCaptureSemaphore);

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        // The context caches programs by shader source, so an identical
        // filter may receive an already-linked program here.
        filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];
        if (!filterProgram.initialized)
        {
            // Attributes must be bound before linking.
            [self initializeAttributes];
            if (![filterProgram link])
            {
                NSString *progLog = [filterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [filterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [filterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                filterProgram = nil;
                // Shader link failure is a programmer error; abort in debug.
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        filterPositionAttribute = [filterProgram attributeIndex:@"position"];
        filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
        filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader

        [GPUImageContext setActiveShaderProgram:filterProgram];

        glEnableVertexAttribArray(filterPositionAttribute);
        glEnableVertexAttribArray(filterTextureCoordinateAttribute);
    });

    return self;
}
// Convenience initializer: pairs the supplied fragment shader with the
// standard pass-through vertex shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [self initWithVertexShaderFromString:kGPUImageVertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    return self;
}

// Loads a fragment shader from a .fsh resource in the main bundle.
// NOTE(review): if the file is missing, fragmentShaderString is nil and the
// shader compile further down will fail — TODO confirm callers guarantee the
// resource exists.
- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
{
    NSString *fragmentShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentShaderFilename ofType:@"fsh"];
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragmentShaderPathname encoding:NSUTF8StringEncoding error:nil];

    if (!(self = [self initWithFragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    return self;
}

// Default initializer: a pass-through filter.
- (id)init;
{
    if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))
    {
        return nil;
    }

    return self;
}

// Binds the standard attribute names before the program is linked.
- (void)initializeAttributes;
{
    [filterProgram addAttribute:@"position"];
    [filterProgram addAttribute:@"inputTextureCoordinate"];

    // Override this, calling back to this super method, in order to add new attributes to your vertex shader
}
// Hook invoked whenever the FBO size changes; no-op in the base class.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    // This is where you can override to provide some custom setup, if your filter has a size-dependent element
}

- (void)dealloc
{
// On older deployment targets GCD objects are not ARC-managed, so the
// capture semaphore must be released manually.
#if !OS_OBJECT_USE_OBJC
    if (imageCaptureSemaphore != NULL)
    {
        dispatch_release(imageCaptureSemaphore);
    }
#endif
}
#pragma mark -
#pragma mark Still image processing

// Arms still-image capture for the next rendered frame. Takes the capture
// semaphore (non-blocking) so newCGImageFromCurrentlyProcessedOutput will
// wait until that frame has rendered.
- (void)useNextFrameForImageCapture;
{
    usingNextFrameForImageCapture = YES;

    // Set the semaphore high, if it isn't already
    if (dispatch_semaphore_wait(imageCaptureSemaphore, DISPATCH_TIME_NOW) != 0)
    {
        return;
    }
}

// Extracts the most recently rendered frame as a new CGImage (caller owns
// the returned ref). Returns NULL on timeout, e.g. when
// useNextFrameForImageCapture was never called.
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput
{
    // Give it three seconds to process, then abort if they forgot to set up the image capture properly
    double timeoutForImageCapture = 3.0;
    dispatch_time_t convertedTimeout = dispatch_time(DISPATCH_TIME_NOW, timeoutForImageCapture * NSEC_PER_SEC);

    if (dispatch_semaphore_wait(imageCaptureSemaphore, convertedTimeout) != 0)
    {
        return NULL;
    }

    GPUImageFramebuffer* framebuffer = [self framebufferForOutput];

    usingNextFrameForImageCapture = NO;
    dispatch_semaphore_signal(imageCaptureSemaphore);

    CGImageRef image = [framebuffer newCGImageFromFramebufferContents];
    return image;
}
#pragma mark -
#pragma mark Managing the display FBOs

// Size of the framebuffer this filter renders into: the input texture size,
// unless an explicit maximum output size is set and is smaller.
- (CGSize)sizeOfFBO;
{
    CGSize maximumSize = [self maximumOutputSize];
    BOOL maximumUnsetOrNotSmaller = CGSizeEqualToSize(maximumSize, CGSizeZero) || (inputTextureSize.width < maximumSize.width);
    return maximumUnsetOrNotSmaller ? inputTextureSize : maximumSize;
}
#pragma mark -
#pragma mark Rendering

// Returns the static texture-coordinate quad (triangle strip order) that
// compensates for the given input rotation/flip mode. The returned pointer
// references static storage and must not be freed.
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
{
    static const GLfloat noRotationTextureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    static const GLfloat rotateLeftTextureCoordinates[] = {
        1.0f, 0.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        0.0f, 1.0f,
    };

    static const GLfloat rotateRightTextureCoordinates[] = {
        0.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,
    };

    static const GLfloat verticalFlipTextureCoordinates[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    static const GLfloat horizontalFlipTextureCoordinates[] = {
        1.0f, 0.0f,
        0.0f, 0.0f,
        1.0f, 1.0f,
        0.0f, 1.0f,
    };

    static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        1.0f, 1.0f,
    };

    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
        1.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        0.0f, 0.0f,
    };

    static const GLfloat rotate180TextureCoordinates[] = {
        1.0f, 1.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 0.0f,
    };

    // No default case: the compiler warns if a new rotation mode is added
    // without a matching coordinate table.
    switch(rotationMode)
    {
        case kGPUImageNoRotation: return noRotationTextureCoordinates;
        case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;
        case kGPUImageRotateRight: return rotateRightTextureCoordinates;
        case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;
        case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;
        case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;
        case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;
        case kGPUImageRotate180: return rotate180TextureCoordinates;
    }
}
// Core render pass: draws the input framebuffer through this filter's shader
// program into a freshly fetched output framebuffer. Must run on the video
// processing queue with the image-processing GL context current.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        // Still balance the lock taken in setInputFramebuffer:atIndex:.
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        // Extra lock keeps the framebuffer alive until the capture reads it.
        [outputFramebuffer lock];
    }

    // Re-apply any uniform values recorded by the state-restoration blocks,
    // since the program may have been used by another filter in between.
    [self setUniformsForProgramAtIndex:0];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    // Texture unit 2 is used for the primary input throughout GPUImage.
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);

    glUniform1i(filterInputTextureUniform, 2);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [firstInputFramebuffer unlock];

    if (usingNextFrameForImageCapture)
    {
        // Frame is ready; release the waiter in newCGImageFromCurrentlyProcessedOutput.
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}
// Hands the freshly rendered framebuffer to all downstream targets. Two
// passes over the targets are deliberate: first every target locks the
// framebuffer, then this filter drops its own lock, and only then are
// targets told to process — see the ordering comments below.
- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
{
    if (self.frameProcessingCompletionBlock != NULL)
    {
        self.frameProcessingCompletionBlock(self, frameTime);
    }
    
    // Get all targets the framebuffer so they can grab a lock on it
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
            [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex];
        }
    }
    
    // Release our hold so it can return to the cache immediately upon processing
    [[self framebufferForOutput] unlock];
    
    if (usingNextFrameForImageCapture)
    {
//        usingNextFrameForImageCapture = NO;
    }
    else
    {
        [self removeOutputFramebuffer];
    }    
    
    // Trigger processing last, so that our unlock comes first in serial execution, avoiding the need for a callback
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];
        }
    }
}

// Size of the frame this filter emits; identical to the input size in the
// base class (subclasses that resize override this).
- (CGSize)outputFrameSize;
{
    return inputTextureSize;
}
#pragma mark -
#pragma mark Input parameters

// Sets the clear color used before each render pass.
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
{
    backgroundColorRed = redComponent;
    backgroundColorGreen = greenComponent;
    backgroundColorBlue = blueComponent;
    backgroundColorAlpha = alphaComponent;
}

// The following methods look up a uniform by its shader name and forward to
// the by-index setters below. NOTE(review): the selectors mix
// "forUniformName:" and "forUniform:" for an NSString parameter — an API
// inconsistency, but the signatures are public and cannot be renamed without
// breaking callers.
- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setInteger:newInteger forUniform:uniformIndex program:filterProgram];
}

- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setFloat:newFloat forUniform:uniformIndex program:filterProgram];
}

- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setSize:newSize forUniform:uniformIndex program:filterProgram];
}

- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setPoint:newPoint forUniform:uniformIndex program:filterProgram];
}

- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setVec3:newVec3 forUniform:uniformIndex program:filterProgram];
}

- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setVec4:newVec4 forUniform:uniformIndex program:filterProgram];
}

- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    
    [self setFloatArray:array length:count forUniform:uniformIndex program:filterProgram];
}
// The by-index uniform setters below all follow the same pattern: hop onto
// the video processing queue, make the program current, then register the
// glUniform* call as a state-restoration block (so it can be replayed before
// every render) and execute it once immediately.

- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniformMatrix3fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);
        }];
    });
}

- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniformMatrix4fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);
        }];
    });
}

- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform1f(uniform, floatValue);
        }];
    });
}

- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            // CGPoint uses CGFloat (possibly double); repack as GLfloat pairs.
            GLfloat positionArray[2];
            positionArray[0] = pointValue.x;
            positionArray[1] = pointValue.y;
            
            glUniform2fv(uniform, 1, positionArray);
        }];
    });
}

- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            GLfloat sizeArray[2];
            sizeArray[0] = sizeValue.width;
            sizeArray[1] = sizeValue.height;
            
            glUniform2fv(uniform, 1, sizeArray);
        }];
    });
}

- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform3fv(uniform, 1, (GLfloat *)&vectorValue);
        }];
    });
}

- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform4fv(uniform, 1, (GLfloat *)&vectorValue);
        }];
    });
}

- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    // Make a copy of the data, so it doesn't get overwritten before async call executes
    NSData* arrayData = [NSData dataWithBytes:arrayValue length:arrayLength * sizeof(arrayValue[0])];

    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform1fv(uniform, arrayLength, [arrayData bytes]);
        }];
    });
}

- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];
        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform1i(uniform, intValue);
        }];
    });
}

// Stores the latest uniform-setting block keyed by uniform index (replacing
// any previous value for that uniform) and runs it once immediately.
- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
{
    [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
    uniformStateBlock();
}

// Replays every recorded uniform block so the program's uniforms reflect the
// most recently set values before drawing.
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
    [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
        dispatch_block_t currentBlock = obj;
        currentBlock();
    }];
}
#pragma mark -
#pragma mark GPUImageInput

// GPUImageInput entry point: renders the incoming frame through this filter
// and then notifies downstream targets.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // Full-screen quad in normalized device coordinates (triangle strip).
    static const GLfloat imageVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };
    
    [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];

    [self informTargetsAboutNewFrameAtTime:frameTime];
}

// Single-input filter: always texture slot 0.
- (NSInteger)nextAvailableTextureIndex;
{
    return 0;
}

// Retains the incoming framebuffer until rendering completes (unlocked in
// renderToTextureWithVertices:textureCoordinates:).
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    firstInputFramebuffer = newInputFramebuffer;
    [firstInputFramebuffer lock];
}

// Swaps width and height when the current rotation mode is a 90-degree one.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    CGSize rotatedSize = sizeToRotate;
    
    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
    {
        rotatedSize.width = sizeToRotate.height;
        rotatedSize.height = sizeToRotate.width;
    }
    
    return rotatedSize; 
}
// Maps a point in normalized (0-1) texture space through the given rotation
// or flip mode, so point-based uniforms track a rotated input. Pure function
// of its arguments; kGPUImageNoRotation returns the point unchanged.
- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;
{
    CGFloat x = pointToRotate.x;
    CGFloat y = pointToRotate.y;
    CGPoint transformedPoint;
    
    switch(rotation)
    {
        case kGPUImageNoRotation:
            return pointToRotate;
        case kGPUImageFlipHorizonal:
            transformedPoint = CGPointMake(1.0 - x, y);
            break;
        case kGPUImageFlipVertical:
            transformedPoint = CGPointMake(x, 1.0 - y);
            break;
        case kGPUImageRotateLeft:
            transformedPoint = CGPointMake(1.0 - y, x);
            break;
        case kGPUImageRotateRight:
            transformedPoint = CGPointMake(y, 1.0 - x);
            break;
        case kGPUImageRotateRightFlipVertical:
            transformedPoint = CGPointMake(y, x);
            break;
        case kGPUImageRotateRightFlipHorizontal:
            transformedPoint = CGPointMake(1.0 - y, 1.0 - x);
            break;
        case kGPUImageRotate180:
            transformedPoint = CGPointMake(1.0 - x, 1.0 - y);
            break;
    }
    
    return transformedPoint;
}
// Records the incoming frame size (rotation-adjusted) and re-runs any
// size-dependent filter setup. When an explicit processing size has been
// forced, the incoming size is either ignored or aspect-fitted into the
// forced maximum.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (self.preventRendering)
    {
        return;
    }
    
    if (overrideInputSize)
    {
        if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero))
        {
            // Intentionally empty: a fixed size was forced via
            // forceProcessingAtSize:, so the incoming size is ignored.
        }
        else
        {
            // Aspect-fit the incoming frame inside the forced maximum.
            CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height));
            inputTextureSize = insetRect.size;
        }
    }
    else
    {
        CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex];
        
        if (CGSizeEqualToSize(rotatedSize, CGSizeZero))
        {
            inputTextureSize = rotatedSize;
        }
        else if (!CGSizeEqualToSize(inputTextureSize, rotatedSize))
        {
            inputTextureSize = rotatedSize;
        }
    }
    
    [self setupFilterForSize:[self sizeOfFBO]];
}
// Records how the input is rotated relative to this filter's orientation.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    inputRotation = newInputRotation;
}

// Forces processing at an exact size; passing CGSizeZero cancels the
// override and resumes following the input size.
- (void)forceProcessingAtSize:(CGSize)frameSize;
{    
    if (CGSizeEqualToSize(frameSize, CGSizeZero))
    {
        overrideInputSize = NO;
    }
    else
    {
        overrideInputSize = YES;
        inputTextureSize = frameSize;
        forcedMaximumSize = CGSizeZero;
    }
}

// Forces a maximum processing size while preserving the input's aspect
// ratio; passing CGSizeZero cancels the override.
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
{
    if (CGSizeEqualToSize(frameSize, CGSizeZero))
    {
        overrideInputSize = NO;
        inputTextureSize = CGSizeZero;
        forcedMaximumSize = CGSizeZero;
    }
    else
    {
        overrideInputSize = YES;
        forcedMaximumSize = frameSize;
    }
}
// Maximum output size constraint; currently always CGSizeZero (no limit) —
// the target-derived calculation below is deliberately disabled.
- (CGSize)maximumOutputSize;
{
    // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
    return CGSizeZero;

    /*
    if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
    {
        for (id<GPUImageInput> currentTarget in targets)
        {
            if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
            {
                cachedMaximumOutputSize = [currentTarget maximumOutputSize];
            }
        }
    }
    
    return cachedMaximumOutputSize;
    */
}

// Propagates end-of-stream downstream exactly once.
- (void)endProcessing 
{
    if (!isEndProcessing)
    {
        isEndProcessing = YES;
        
        for (id<GPUImageInput> currentTarget in targets)
        {
            [currentTarget endProcessing];
        }
    }
}

// Base filters consume full-color input; luminance-only subclasses override.
- (BOOL)wantsMonochromeInput;
{
    return NO;
}

#pragma mark -
#pragma mark Accessors

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 5,257 |
```objective-c
#import "GPUImageExclusionBlendFilter.h"
// Exclusion blend fragment shader (Porter-Duff style, premultiplied-alpha
// formula in the inline comment). Two variants: OpenGL ES requires precision
// qualifiers, desktop GLSL does not.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 void main()
 {
     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
     //     Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)
     gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);
 }
);
#else
NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
     //     Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)
     gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);
 }
);
#endif
@implementation GPUImageExclusionBlendFilter

// Configures the two-input blend filter with the exclusion fragment shader
// defined above; geometry and texture setup come from the superclass.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageExclusionBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageExclusionBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 463 |
```objective-c
#import "GPUImageErosionFilter.h"
#import "GPUImageDilationFilter.h"
@implementation GPUImageErosionFilter
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// One-dimensional erosion pass, radius 1: output is the minimum red-channel
// intensity over the center texel and its +/- 1 neighbors along the sampling
// axis (the vertex shader decides horizontal vs vertical). Separable: two
// passes give a full 2D erosion.
NSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING
(
 precision lowp float;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
     lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
     minValue = min(minValue, oneStepNegativeIntensity);
     gl_FragColor = vec4(vec3(minValue), 1.0);
 }
);

// Radius 2: minimum over the center and +/- 1, +/- 2 neighbors.
NSString *const kGPUImageErosionRadiusTwoFragmentShaderString = SHADER_STRING
(
 precision lowp float;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
     lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
     minValue = min(minValue, oneStepNegativeIntensity);
     minValue = min(minValue, twoStepsPositiveIntensity);
     minValue = min(minValue, twoStepsNegativeIntensity);
     gl_FragColor = vec4(vec3(minValue), 1.0);
 }
);

// Radius 3: minimum over the center and +/- 1..3 neighbors.
NSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING
(
 precision lowp float;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 varying vec2 threeStepsPositiveTextureCoordinate;
 varying vec2 threeStepsNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
     lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
     minValue = min(minValue, oneStepNegativeIntensity);
     minValue = min(minValue, twoStepsPositiveIntensity);
     minValue = min(minValue, twoStepsNegativeIntensity);
     minValue = min(minValue, threeStepsPositiveIntensity);
     minValue = min(minValue, threeStepsNegativeIntensity);
     gl_FragColor = vec4(vec3(minValue), 1.0);
 }
);

// Radius 4: minimum over the center and +/- 1..4 neighbors.
NSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING
(
 precision lowp float;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 varying vec2 threeStepsPositiveTextureCoordinate;
 varying vec2 threeStepsNegativeTextureCoordinate;
 varying vec2 fourStepsPositiveTextureCoordinate;
 varying vec2 fourStepsNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
     float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
     float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
     lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
     minValue = min(minValue, oneStepNegativeIntensity);
     minValue = min(minValue, twoStepsPositiveIntensity);
     minValue = min(minValue, twoStepsNegativeIntensity);
     minValue = min(minValue, threeStepsPositiveIntensity);
     minValue = min(minValue, threeStepsNegativeIntensity);
     minValue = min(minValue, fourStepsPositiveIntensity);
     minValue = min(minValue, fourStepsNegativeIntensity);
     gl_FragColor = vec4(vec3(minValue), 1.0);
 }
);
#else
// Desktop GLSL variants of the erosion shaders: identical logic to the
// OpenGL ES versions above, with the precision qualifiers removed.
NSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING
(
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
     float minValue = min(centerIntensity, oneStepPositiveIntensity);
     minValue = min(minValue, oneStepNegativeIntensity);
     gl_FragColor = vec4(vec3(minValue), 1.0);
 }
);

NSString *const kGPUImageErosionRadiusTwoFragmentShaderString = SHADER_STRING
(
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
     float minValue = min(centerIntensity, oneStepPositiveIntensity);
     minValue = min(minValue, oneStepNegativeIntensity);
     minValue = min(minValue, twoStepsPositiveIntensity);
     minValue = min(minValue, twoStepsNegativeIntensity);
     gl_FragColor = vec4(vec3(minValue), 1.0);
 }
);

NSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING
(
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 varying vec2 threeStepsPositiveTextureCoordinate;
 varying vec2 threeStepsNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
     float minValue = min(centerIntensity, oneStepPositiveIntensity);
     minValue = min(minValue, oneStepNegativeIntensity);
     minValue = min(minValue, twoStepsPositiveIntensity);
     minValue = min(minValue, twoStepsNegativeIntensity);
     minValue = min(minValue, threeStepsPositiveIntensity);
     minValue = min(minValue, threeStepsNegativeIntensity);
     gl_FragColor = vec4(vec3(minValue), 1.0);
 }
);
NSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
varying vec2 fourStepsPositiveTextureCoordinate;
varying vec2 fourStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
float minValue = min(centerIntensity, oneStepPositiveIntensity);
minValue = min(minValue, oneStepNegativeIntensity);
minValue = min(minValue, twoStepsPositiveIntensity);
minValue = min(minValue, twoStepsNegativeIntensity);
minValue = min(minValue, threeStepsPositiveIntensity);
minValue = min(minValue, threeStepsNegativeIntensity);
minValue = min(minValue, fourStepsPositiveIntensity);
minValue = min(minValue, fourStepsNegativeIntensity);
gl_FragColor = vec4(vec3(minValue), 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
/// Designated initializer. Selects the matching erosion shader pair for the
/// requested radius. Radius 0 is treated as 1; radii above 4 fall back to the
/// radius-4 shaders (the largest supported), matching the previous switch.
- (id)initWithRadius:(NSUInteger)dilationRadius;
{
    // Clamp the requested radius into the supported 1-4 range.
    NSUInteger clampedRadius = MIN(MAX(dilationRadius, (NSUInteger)1), (NSUInteger)4);

    NSString *vertexShaderForThisRadius;
    NSString *fragmentShaderForThisRadius;

    if (clampedRadius == 1)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageErosionRadiusOneFragmentShaderString;
    }
    else if (clampedRadius == 2)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageErosionRadiusTwoFragmentShaderString;
    }
    else if (clampedRadius == 3)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageErosionRadiusThreeFragmentShaderString;
    }
    else
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageErosionRadiusFourFragmentShaderString;
    }

    // The same shader pair drives both the horizontal and vertical passes.
    self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius
                        firstStageFragmentShaderFromString:fragmentShaderForThisRadius
                         secondStageVertexShaderFromString:vertexShaderForThisRadius
                       secondStageFragmentShaderFromString:fragmentShaderForThisRadius];
    return self;
}
/// Convenience initializer: defaults to a 1-pixel erosion radius.
- (id)init;
{
    return [self initWithRadius:1];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageErosionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,853 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageGrayscaleFilter;
@class GPUImageSingleComponentGaussianBlurFilter;
@class GPUImageDirectionalSobelEdgeDetectionFilter;
@class GPUImageDirectionalNonMaximumSuppressionFilter;
@class GPUImageWeakPixelInclusionFilter;
/** This applies the edge detection process described by John Canny in
Canny, J., A Computational Approach To Edge Detection, IEEE Trans. Pattern Analysis and Machine Intelligence, 8(6):679-698, 1986.
and implemented in OpenGL ES by
A. Ensor, S. Hall. GPU-based Image Analysis on Mobile Devices. Proceedings of Image and Vision Computing New Zealand 2011.
It starts with a conversion to luminance, followed by an accelerated 9-hit Gaussian blur. A Sobel operator is applied to obtain the overall
gradient strength in the blurred image, as well as the direction (in texture sampling steps) of the gradient. A non-maximum suppression filter
acts along the direction of the gradient, highlighting strong edges that pass the threshold and completely removing those that fail the lower
threshold. Finally, pixels from in-between these thresholds are either included in edges or rejected based on neighboring pixels.
*/
@interface GPUImageCannyEdgeDetectionFilter : GPUImageFilterGroup
{
// Stage 1: RGB -> luminance conversion.
GPUImageGrayscaleFilter *luminanceFilter;
// Stage 2: Gaussian blur of the single-component luminance image.
GPUImageSingleComponentGaussianBlurFilter *blurFilter;
// Stage 3: gradient magnitude and direction via a directional Sobel operator.
GPUImageDirectionalSobelEdgeDetectionFilter *edgeDetectionFilter;
// Stage 4: thins edges by suppressing non-maximal responses along the gradient.
GPUImageDirectionalNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
// Stage 5: keeps in-between-threshold pixels only when near strong edge pixels.
GPUImageWeakPixelInclusionFilter *weakPixelInclusionFilter;
}
/** The image width and height factors tweak the appearance of the edges.
 These parameters affect the visibility of the detected edges.
 By default, they match the inverse of the filter size in pixels.
 */
@property(readwrite, nonatomic) CGFloat texelWidth;
/** The image width and height factors tweak the appearance of the edges.
 These parameters affect the visibility of the detected edges.
 By default, they match the inverse of the filter size in pixels.
 */
@property(readwrite, nonatomic) CGFloat texelHeight;
/** The underlying blur radius for the Gaussian blur. Default is 2.0.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
/** The underlying blur texel spacing multiplier. Default is 1.0.
 */
@property (readwrite, nonatomic) CGFloat blurTexelSpacingMultiplier;
/** Any edge with a gradient magnitude above this threshold will pass and show up in the final result.
 */
@property(readwrite, nonatomic) CGFloat upperThreshold;
/** Any edge with a gradient magnitude below this threshold will fail and be removed from the final result.
 */
@property(readwrite, nonatomic) CGFloat lowerThreshold;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 567 |
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
/** A more generalized 9x9 Gaussian blur filter
*/
@interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter
{
// Cached shader uniform locations for the blur center, blur radius, and the
// aspect-ratio correction factor.
GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform;
}
/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;
/** Center for the blur, defaults to 0.5, 0.5
 */
@property (readwrite, nonatomic) CGPoint blurCenter;
/** Radius for the blur, defaults to 1.0
 */
@property (readwrite, nonatomic) CGFloat blurRadius;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 153 |
```objective-c
// Created by Jorge Garcia on 9/5/12.
#import "GPUImageNormalBlendFilter.h"
/*
This equation is a simplification of the general blending equation. It assumes the destination color is opaque, and therefore drops the destination color's alpha term.
D = C1 * C1a + C2 * C2a * (1 - C1a)
where D is the resultant color, C1 is the color of the first element, C1a is the alpha of the first element, C2 is the second element color, C2a is the alpha of the second element. The destination alpha is calculated with:
Da = C1a + C2a * (1 - C1a)
The resultant color is premultiplied with the alpha. To restore the color to the unmultiplied values, just divide by Da, the resultant alpha.
(This is the standard Porter-Duff "source over" compositing operator.)
For some reason Photoshop behaves as if the first element's color were already premultiplied:
D = C1 + C2 * C2a * (1 - C1a)
*/
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (precision qualifiers required). Note the sampler naming:
// c2 (the bottom layer) is sampled from inputImageTexture, and c1 (the top
// layer, whose alpha gates the blend) from inputImageTexture2. The output is
// un-premultiplied by dividing through the resultant alpha.
NSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 c2 = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2);
lowp vec4 outputColor;
//     outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a);
//     outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a);
//     outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a);
//     outputColor.a = c1.a + c2.a * (1.0 - c1.a);
lowp float a = c1.a + c2.a * (1.0 - c1.a);
lowp float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output
outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor;
outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor;
outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor;
outputColor.a = a;
gl_FragColor = outputColor;
}
);
#else
// Desktop OpenGL variant: identical math without precision qualifiers.
NSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 c2 = texture2D(inputImageTexture, textureCoordinate);
vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2);
vec4 outputColor;
//     outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a);
//     outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a);
//     outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a);
//     outputColor.a = c1.a + c2.a * (1.0 - c1.a);
float a = c1.a + c2.a * (1.0 - c1.a);
float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output
outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor;
outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor;
outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor;
outputColor.a = a;
gl_FragColor = outputColor;
}
);
#endif
@implementation GPUImageNormalBlendFilter

#pragma mark -
#pragma mark Initialization and teardown

/// Configures the two-input filter with the normal ("source over") blend shader.
- (id)init
{
    self = [super initWithFragmentShaderFromString:kGPUImageNormalBlendFragmentShaderString];
    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,046 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/** Applies a color burn blend of two images.
 The blend math itself lives in the corresponding fragment shader; this class
 only selects that shader.
 */
@interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
{
// No additional ivars; all configuration is inherited from GPUImageTwoInputFilter.
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 39 |
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
// Non-maximum suppression over the 3x3 neighborhood whose sampling coordinates
// are provided by the GPUImage3x3TextureSamplingFilter superclass; the actual
// comparison is performed in the fragment shader in the .m file.
@interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 33 |
```objective-c
//
// GPUImageHistogramEqualizationFilter.m
// FilterShowcase
//
// Created by Adam Marcus on 19/08/2014.
//
#import "GPUImageHistogramEqualizationFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Red-channel equalization (OpenGL ES variant): remaps the red channel through
// the 256x1 equalization lookup texture bound as inputImageTexture2; other
// channels pass through unchanged.
NSString *const kGPUImageRedHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
gl_FragColor = vec4(redCurveValue, textureColor.g, textureColor.b, textureColor.a);
}
);
#else
// Red-channel equalization (desktop OpenGL variant).
NSString *const kGPUImageRedHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
gl_FragColor = vec4(redCurveValue, textureColor.g, textureColor.b, textureColor.a);
}
);
#endif
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Green-channel equalization (OpenGL ES variant).
NSString *const kGPUImageGreenHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
gl_FragColor = vec4(textureColor.r, greenCurveValue, textureColor.b, textureColor.a);
}
);
#else
// Green-channel equalization (desktop OpenGL variant).
NSString *const kGPUImageGreenHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
gl_FragColor = vec4(textureColor.r, greenCurveValue, textureColor.b, textureColor.a);
}
);
#endif
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Blue-channel equalization (OpenGL ES variant).
NSString *const kGPUImageBlueHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
gl_FragColor = vec4(textureColor.r, textureColor.g, blueCurveValue, textureColor.a);
}
);
#else
// Blue-channel equalization (desktop OpenGL variant).
NSString *const kGPUImageBlueHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
gl_FragColor = vec4(textureColor.r, textureColor.g, blueCurveValue, textureColor.a);
}
);
#endif
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// RGB equalization (OpenGL ES variant): remaps each channel independently
// through its own row of the lookup texture.
NSString *const kGPUImageRGBHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
lowp float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
lowp float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);
}
);
#else
// RGB equalization (desktop OpenGL variant).
NSString *const kGPUImageRGBHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);
}
);
#endif
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Luminance equalization (OpenGL ES variant): computes Rec. 709-style luminance,
// looks up the equalized luminance, and shifts all three channels by the delta
// (clamped to [0,1]) so hue is roughly preserved.
NSString *const kGPUImageLuminanceHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
const lowp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp float luminance = dot(textureColor.rgb, W);
lowp float newLuminance = texture2D(inputImageTexture2, vec2(luminance, 0.0)).r;
lowp float deltaLuminance = newLuminance - luminance;
lowp float red   = clamp(textureColor.r + deltaLuminance, 0.0, 1.0);
lowp float green = clamp(textureColor.g + deltaLuminance, 0.0, 1.0);
lowp float blue  = clamp(textureColor.b + deltaLuminance, 0.0, 1.0);
gl_FragColor = vec4(red, green, blue, textureColor.a);
}
);
#else
// Luminance equalization (desktop OpenGL variant).
NSString *const kGPUImageLuminanceHistogramEqualizationFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, W);
float newLuminance = texture2D(inputImageTexture2, vec2(luminance, 0.0)).r;
float deltaLuminance = newLuminance - luminance;
float red   = clamp(textureColor.r + deltaLuminance, 0.0, 1.0);
float green = clamp(textureColor.g + deltaLuminance, 0.0, 1.0);
float blue  = clamp(textureColor.b + deltaLuminance, 0.0, 1.0);
gl_FragColor = vec4(red, green, blue, textureColor.a);
}
);
#endif
@implementation GPUImageHistogramEqualizationFilter
@synthesize downsamplingFactor = _downsamplingFactor;
#pragma mark -
#pragma mark Initialization
/// Convenience initializer: RGB histogram equalization is the default.
- (id)init;
{
    return [self initWithHistogramType:kGPUImageHistogramRGB];
}
// Designated initializer. Builds a three-stage group:
//   1. histogramFilter computes the histogram of each incoming frame;
//   2. a CPU-side block converts that histogram into per-channel cumulative
//      distribution curves and uploads them as a 256x1 lookup texture;
//   3. equalizationFilter remaps the original frame through the lookup texture
//      using the shader matching newHistogramType.
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType
{
if (!(self = [super init]))
{
return nil;
}
histogramFilter = [[GPUImageHistogramFilter alloc] initWithHistogramType:newHistogramType];
[self addFilter:histogramFilter];
GLubyte dummyInput[4 * 256]; // NB: No way to initialise GPUImageRawDataInput without providing bytes
rawDataInputFilter = [[GPUImageRawDataInput alloc] initWithBytes:dummyInput size:CGSizeMake(256.0, 1.0) pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte];
rawDataOutputFilter = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(256.0, 3.0) resultsInBGRAFormat:YES];
// Unretained references avoid a retain cycle: self owns both filters, and the
// block below is owned by rawDataOutputFilter.
__unsafe_unretained GPUImageRawDataOutput *_rawDataOutputFilter = rawDataOutputFilter;
__unsafe_unretained GPUImageRawDataInput *_rawDataInputFilter = rawDataInputFilter;
[rawDataOutputFilter setNewFrameAvailableBlock:^{
unsigned int histogramBins[3][256];
[_rawDataOutputFilter lockFramebufferForReading];
GLubyte *data = [_rawDataOutputFilter rawBytesForImage];
// NOTE(review): skips the first row of the 256x3 output; presumably the
// accumulated histogram lives on the second row — verify against
// GPUImageHistogramFilter's output layout.
data += [_rawDataOutputFilter bytesPerRowInOutput];
histogramBins[0][0] = *data++;
histogramBins[1][0] = *data++;
histogramBins[2][0] = *data++;
data++;
// Convert per-bin counts into cumulative sums (one CDF per channel); the
// fourth byte of each BGRA pixel is skipped.
for (unsigned int x = 1; x < 256; x++) {
histogramBins[0][x] = histogramBins[0][x-1] + *data++;
histogramBins[1][x] = histogramBins[1][x-1] + *data++;
histogramBins[2][x] = histogramBins[2][x-1] + *data++;
data++;
}
[_rawDataOutputFilter unlockFramebufferAfterReading];
GLubyte colorMapping[4 * 256];
GLubyte *_colorMapping = colorMapping;
// Normalize each channel's CDF to 0-255 to form the equalization curve.
// NOTE(review): histogramBins[c][255] of 0 would divide by zero — confirm the
// histogram output guarantees a nonzero total per channel.
for (unsigned int x = 0; x < 256; x++) {
*_colorMapping++ = (GLubyte) (((histogramBins[0][x] - histogramBins[0][0]) * 255) / histogramBins[0][255]);
*_colorMapping++ = (GLubyte) (((histogramBins[1][x] - histogramBins[1][0]) * 255) / histogramBins[1][255]);
*_colorMapping++ = (GLubyte) (((histogramBins[2][x] - histogramBins[2][0]) * 255) / histogramBins[2][255]);
*_colorMapping++ = 255;
}
_colorMapping = colorMapping;
// Upload the curve as the 256x1 lookup texture consumed by the shader.
[_rawDataInputFilter updateDataFromBytes:_colorMapping size:CGSizeMake(256.0, 1.0)];
[_rawDataInputFilter processData];
}];
[histogramFilter addTarget:rawDataOutputFilter];
// Pick the remapping shader matching the requested histogram type.
NSString *fragmentShader = nil;
switch (newHistogramType) {
case kGPUImageHistogramRed:
fragmentShader = kGPUImageRedHistogramEqualizationFragmentShaderString;
break;
case kGPUImageHistogramGreen:
fragmentShader = kGPUImageGreenHistogramEqualizationFragmentShaderString;
break;
case kGPUImageHistogramBlue:
fragmentShader = kGPUImageBlueHistogramEqualizationFragmentShaderString;
break;
default:
case kGPUImageHistogramRGB:
fragmentShader = kGPUImageRGBHistogramEqualizationFragmentShaderString;
break;
case kGPUImageHistogramLuminance:
fragmentShader = kGPUImageLuminanceHistogramEqualizationFragmentShaderString;
break;
}
// The equalization filter takes the source frame on slot 0 and the lookup
// texture on slot 1; both it and the histogram filter receive the input frame.
GPUImageFilter *equalizationFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:fragmentShader];
[rawDataInputFilter addTarget:equalizationFilter atTextureLocation:1];
[self addFilter:equalizationFilter];
self.initialFilters = [NSArray arrayWithObjects:histogramFilter, equalizationFilter, nil];
self.terminalFilter = equalizationFilter;
self.downsamplingFactor = 16;
return self;
}
#pragma mark -
#pragma mark Accessors
/// Forwards the downsampling factor to the underlying histogram filter,
/// skipping the update when the value is unchanged.
- (void)setDownsamplingFactor:(NSUInteger)newValue;
{
    if (_downsamplingFactor == newValue)
    {
        return;
    }

    _downsamplingFactor = newValue;
    histogramFilter.downsamplingFactor = newValue;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,722 |
```objective-c
#import "GPUImageContrastFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: scales each color channel's distance from mid-gray (0.5)
// by the contrast uniform; alpha passes through unchanged.
NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float contrast;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
}
);
#else
// Desktop OpenGL variant: identical math without precision qualifiers.
NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float contrast;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
}
);
#endif
@implementation GPUImageContrastFilter
@synthesize contrast = _contrast;
#pragma mark -
#pragma mark Initialization
/// Sets up the contrast shader, caches its uniform location, and starts at the
/// identity contrast level of 1.0.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageContrastFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    contrastUniform = [filterProgram uniformIndex:@"contrast"];
    self.contrast = 1.0;

    return self;
}
#pragma mark -
#pragma mark Accessors
/// Stores the new contrast and pushes it straight to the shader uniform.
- (void)setContrast:(CGFloat)newValue;
{
    _contrast = newValue;
    [self setFloat:newValue forUniform:contrastUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageContrastFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 352 |
```objective-c
#import "GPUImageFilter.h"
/** Adjusts the contrast of the image
 */
@interface GPUImageContrastFilter : GPUImageFilter
{
// Cached location of the "contrast" shader uniform.
GLint contrastUniform;
}
/** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level
 */
@property(readwrite, nonatomic) CGFloat contrast;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageContrastFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 76 |
```objective-c
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#import "GPUImageFramebuffer.h"
// Reuse pool for GPUImageFramebuffer instances, so framebuffers of a given size
// and texture configuration can be recycled rather than reallocated per frame.
@interface GPUImageFramebufferCache : NSObject
// Framebuffer management
// Fetches a framebuffer matching the size and texture options; pass
// onlyTexture:YES to request a texture without a full framebuffer attachment.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
// Convenience variant using default texture options.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
// Returns a framebuffer to the pool for later reuse.
- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
// Releases all framebuffers not currently in use.
- (void)purgeAllUnassignedFramebuffers;
// Tracks framebuffers whose contents are being read back for image capture, so
// they are not recycled mid-capture.
- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 160 |
```objective-c
#import "GPUImageToonFilter.h"
#import "GPUImageSobelEdgeDetectionFilter.h"
#import "GPUImage3x3ConvolutionFilter.h"
// Code from "Graphics Shaders: Theory and Practice" by M. Bailey and S. Cunningham
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant. Combines a Sobel gradient-magnitude edge test with color
// posterization: colors are quantized to quantizationLevels steps, and pixels
// whose gradient magnitude exceeds threshold are drawn black (edge outlines).
// NOTE(review): the "intensity" uniform and the "W" constant are declared but
// never referenced in this shader body.
NSString *const kGPUImageToonFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float intensity;
uniform highp float threshold;
uniform highp float quantizationLevels;
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = length(vec2(h, v));
vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;
float thresholdTest = 1.0 - step(threshold, mag);
gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);
}
);
#else
// Desktop OpenGL variant: identical math without the precision declaration.
NSString *const kGPUImageToonFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform float intensity;
uniform float threshold;
uniform float quantizationLevels;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = length(vec2(h, v));
vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;
float thresholdTest = 1.0 - step(threshold, mag);
gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);
}
);
#endif
@implementation GPUImageToonFilter
@synthesize threshold = _threshold;
@synthesize quantizationLevels = _quantizationLevels;
#pragma mark -
#pragma mark Initialization and teardown
/// Sets up the toon shader, caches its uniform locations, and applies the
/// default edge threshold (0.2) and posterization level count (10).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageToonFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    hasOverriddenImageSizeFactor = NO;

    thresholdUniform = [filterProgram uniformIndex:@"threshold"];
    quantizationLevelsUniform = [filterProgram uniformIndex:@"quantizationLevels"];

    self.threshold = 0.2;
    self.quantizationLevels = 10.0;

    return self;
}
#pragma mark -
#pragma mark Accessors
/// Stores the new edge threshold and forwards it to the shader uniform.
- (void)setThreshold:(CGFloat)newValue;
{
    _threshold = newValue;
    [self setFloat:newValue forUniform:thresholdUniform program:filterProgram];
}
/// Stores the new quantization level count and forwards it to the shader uniform.
- (void)setQuantizationLevels:(CGFloat)newValue;
{
    _quantizationLevels = newValue;
    [self setFloat:newValue forUniform:quantizationLevelsUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageToonFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,232 |
```objective-c
#import "GPUImageFilter.h"
// This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image
// dimensions, or this won't work correctly
@interface GPUImageSolidColorGenerator : GPUImageFilter
{
// Shader uniform locations for the fill color and the alpha-handling flag.
GLint colorUniform;
GLint useExistingAlphaUniform;
}
// This color dictates what the output image will be filled with
@property(readwrite, nonatomic) GPUVector4 color;
@property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO
// Convenience method for setting the output color from its individual components.
- (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 157 |
```objective-c
#import "GPUImageHarrisCornerDetectionFilter.h"
/** Shi-Tomasi feature detector
This is the Shi-Tomasi feature detector, as described in
J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994.
*/
@interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter
// Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5.
// All other configuration is inherited from GPUImageHarrisCornerDetectionFilter.
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 116 |
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
// Local binary pattern operator over the 3x3 neighborhood whose sampling
// coordinates are provided by GPUImage3x3TextureSamplingFilter; the exact
// per-pixel encoding is defined in the fragment shader in the .m file.
@interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 32 |
```objective-c
#import "GPUImageGaussianSelectiveBlurFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageTwoInputFilter.h"

// Second-pass fragment shader: mixes the sharp source (texture 0) with its blurred
// copy (texture 1). Pixels within excludeCircleRadius of excludeCirclePoint remain
// sharp; the sharp-to-blurred transition spans excludeBlurSize. aspectRatio rescales
// the y axis so the excluded region is a circle rather than an ellipse.
// OpenGL ES variant (precision qualifiers required on ES).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform lowp float excludeCircleRadius;
uniform lowp vec2 excludeCirclePoint;
uniform lowp float excludeBlurSize;
uniform highp float aspectRatio;
void main()
{
lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);
highp vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse);
gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter));
}
);
#else
// Desktop OpenGL variant: identical logic, without precision qualifiers.
NSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform float excludeCircleRadius;
uniform vec2 excludeCirclePoint;
uniform float excludeBlurSize;
uniform float aspectRatio;
void main()
{
vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);
vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse);
gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter));
}
);
#endif
@implementation GPUImageGaussianSelectiveBlurFilter
@synthesize excludeCirclePoint = _excludeCirclePoint, excludeCircleRadius = _excludeCircleRadius, excludeBlurSize = _excludeBlurSize;
@synthesize blurRadiusInPixels = _blurRadiusInPixels;
@synthesize aspectRatio = _aspectRatio;
// Builds the two-stage pipeline (Gaussian blur -> two-input selective-focus mix)
// and applies default parameter values.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
hasOverriddenAspectRatio = NO;
// First pass: apply a variable Gaussian blur
blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
[self addFilter:blurFilter];
// Second pass: combine the blurred image with the original sharp one
selectiveFocusFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageGaussianSelectiveBlurFragmentShaderString];
[self addFilter:selectiveFocusFilter];
// Texture location 0 needs to be the sharp image for both the blur and the second stage processing
[blurFilter addTarget:selectiveFocusFilter atTextureLocation:1];
// To prevent double updating of this filter, disable updates from the sharp image side
self.initialFilters = [NSArray arrayWithObjects:blurFilter, selectiveFocusFilter, nil];
self.terminalFilter = selectiveFocusFilter;
self.blurRadiusInPixels = 5.0;
self.excludeCircleRadius = 60.0/320.0;
self.excludeCirclePoint = CGPointMake(0.5f, 0.5f);
self.excludeBlurSize = 30.0/320.0;
return self;
}
// Recomputes aspectRatio from the incoming texture size whenever it changes,
// unless the caller has explicitly set an aspect ratio via -setAspectRatio:.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
CGSize oldInputSize = inputTextureSize;
[super setInputSize:newSize atIndex:textureIndex];
inputTextureSize = newSize;
if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!hasOverriddenAspectRatio) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
{
_aspectRatio = (inputTextureSize.width / inputTextureSize.height);
[selectiveFocusFilter setFloat:_aspectRatio forUniformName:@"aspectRatio"];
}
}
#pragma mark -
#pragma mark Accessors
// Blur radius is owned by the internal blur filter; this class just forwards it.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
blurFilter.blurRadiusInPixels = newValue;
}
- (CGFloat)blurRadiusInPixels;
{
return blurFilter.blurRadiusInPixels;
}
// Each setter pushes the value straight into the second-pass shader uniform.
- (void)setExcludeCirclePoint:(CGPoint)newValue;
{
_excludeCirclePoint = newValue;
[selectiveFocusFilter setPoint:newValue forUniformName:@"excludeCirclePoint"];
}
- (void)setExcludeCircleRadius:(CGFloat)newValue;
{
_excludeCircleRadius = newValue;
[selectiveFocusFilter setFloat:newValue forUniformName:@"excludeCircleRadius"];
}
- (void)setExcludeBlurSize:(CGFloat)newValue;
{
_excludeBlurSize = newValue;
[selectiveFocusFilter setFloat:newValue forUniformName:@"excludeBlurSize"];
}
// Setting an aspect ratio manually disables the automatic recomputation above.
- (void)setAspectRatio:(CGFloat)newValue;
{
hasOverriddenAspectRatio = YES;
_aspectRatio = newValue;
[selectiveFocusFilter setFloat:_aspectRatio forUniformName:@"aspectRatio"];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,163 |
```objective-c
#import "GPUImageMaskFilter.h"

// Masks the first input by the second: the output keeps the first image's RGB and
// replaces its alpha with the mask's average luminance scaled by the mask's alpha.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageMaskShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
// Averages the mask's RGB values, and scales that value by the mask's alpha
//
// The dot product should take fewer cycles than doing an average normally
//
// Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0
lowp float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
gl_FragColor = vec4(textureColor.xyz, newAlpha);
// gl_FragColor = vec4(textureColor2);
}
);
#else
// Desktop OpenGL variant: identical logic, without precision qualifiers.
NSString *const kGPUImageMaskShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
// Averages the mask's RGB values, and scales that value by the mask's alpha
//
// The dot product should take fewer cycles than doing an average normally
//
// Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0
float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
gl_FragColor = vec4(textureColor.xyz, newAlpha);
// gl_FragColor = vec4(textureColor2);
}
);
#endif
@implementation GPUImageMaskFilter
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageMaskShaderString]))
{
return nil;
}
return self;
}
// Renders with standard source-over alpha blending enabled so the computed alpha
// actually composites against the framebuffer, then restores the blend state.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
[super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
glDisable(GL_BLEND);
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMaskFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 585 |
```objective-c
#import "GPUImageFilter.h"

/** Creates a swirl distortion on the image
 */
@interface GPUImageSwirlFilter : GPUImageFilter
{
GLint radiusUniform, centerUniform, angleUniform; // cached shader uniform locations
}
/// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5
@property(readwrite, nonatomic) CGFloat radius;
/// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0
@property(readwrite, nonatomic) CGFloat angle;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 148 |
```objective-c
#import "GPUImageChromaKeyFilter.h"

// Shader code based on Apple's CIChromaKeyFilter example: path_to_url#samplecode/CIChromaKeyFilter/Introduction/Intro.html
// Converts both the pixel and the key color to YCrCb and fades alpha to zero as the
// chroma (Cr,Cb) distance to the key color falls below thresholdSensitivity; the
// smoothing uniform widens the transition band.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING
(
precision highp float;
varying highp vec2 textureCoordinate;
uniform float thresholdSensitivity;
uniform float smoothing;
uniform vec3 colorToReplace;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
float maskCr = 0.7132 * (colorToReplace.r - maskY);
float maskCb = 0.5647 * (colorToReplace.b - maskY);
float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
float Cr = 0.7132 * (textureColor.r - Y);
float Cb = 0.5647 * (textureColor.b - Y);
// float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue);
}
);
#else
// Desktop OpenGL variant: identical logic, without the precision declaration.
// NOTE(review): inputImageTexture2 is declared in both variants but never sampled
// here — it appears to be leftover from a two-input version; confirm before removing.
NSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform float thresholdSensitivity;
uniform float smoothing;
uniform vec3 colorToReplace;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
float maskCr = 0.7132 * (colorToReplace.r - maskY);
float maskCb = 0.5647 * (colorToReplace.b - maskY);
float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
float Cr = 0.7132 * (textureColor.r - Y);
float Cb = 0.5647 * (textureColor.b - Y);
// float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue);
}
);
#endif
@implementation GPUImageChromaKeyFilter
@synthesize thresholdSensitivity = _thresholdSensitivity;
@synthesize smoothing = _smoothing;
// Caches uniform locations and applies the defaults (green key, 0.4 threshold, 0.1 smoothing).
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageChromaKeyFragmentShaderString]))
{
return nil;
}
thresholdSensitivityUniform = [filterProgram uniformIndex:@"thresholdSensitivity"];
smoothingUniform = [filterProgram uniformIndex:@"smoothing"];
colorToReplaceUniform = [filterProgram uniformIndex:@"colorToReplace"];
self.thresholdSensitivity = 0.4;
self.smoothing = 0.1;
[self setColorToReplaceRed:0.0 green:1.0 blue:0.0];
return self;
}
#pragma mark -
#pragma mark Accessors
// Each setter pushes its value straight into the corresponding shader uniform.
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
{
GPUVector3 colorToReplace = {redComponent, greenComponent, blueComponent};
[self setVec3:colorToReplace forUniform:colorToReplaceUniform program:filterProgram];
}
- (void)setThresholdSensitivity:(CGFloat)newValue;
{
_thresholdSensitivity = newValue;
[self setFloat:(GLfloat)_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram];
}
- (void)setSmoothing:(CGFloat)newValue;
{
_smoothing = newValue;
[self setFloat:(GLfloat)_smoothing forUniform:smoothingUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,042 |
```objective-c
#import "GPUImageOutput.h"

// Wraps an externally-created OpenGL texture as a GPUImage pipeline source.
@interface GPUImageTextureInput : GPUImageOutput
{
CGSize textureSize; // pixel dimensions of the wrapped texture, supplied at init
}
// Initialization and teardown
- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
// Image rendering
// Pushes the texture downstream to all targets, stamped with the given frame time.
- (void)processTextureWithFrameTime:(CMTime)frameTime;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTextureInput.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 68 |
```objective-c
#import "GPUImageTwoInputFilter.h"

// Darken blend of two inputs; behavior is defined entirely by the fragment shader
// in the implementation file — no configurable properties.
@interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDarkenBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 28 |
```objective-c
#import "GPUImageTwoInputFilter.h"

// Overlay blend of two inputs; behavior is defined entirely by the fragment shader
// in the implementation file — no configurable properties.
@interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 25 |
```objective-c
#import "GPUImageCropFilter.h"

// Pass-through fragment shader: the crop is implemented entirely via the texture
// coordinates computed in -calculateCropTextureCoordinates.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageCropFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
}
);
#else
NSString *const kGPUImageCropFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
}
);
#endif
@interface GPUImageCropFilter ()
- (void)calculateCropTextureCoordinates;
@end
@interface GPUImageCropFilter()
{
CGSize originallySuppliedInputSize; // rotated input size before crop scaling
}
@end
@implementation GPUImageCropFilter
@synthesize cropRegion = _cropRegion;
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer: newCropRegion is a normalized (0..1) rect within the input.
- (id)initWithCropRegion:(CGRect)newCropRegion;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageCropFragmentShaderString]))
{
return nil;
}
self.cropRegion = newCropRegion;
return self;
}
// Default init crops nothing (full-frame region).
- (id)init;
{
if (!(self = [self initWithCropRegion:CGRectMake(0.0, 0.0, 1.0, 1.0)]))
{
return nil;
}
return self;
}
#pragma mark -
#pragma mark Rendering
// Shrinks the reported input size by the crop region so downstream targets see the
// cropped dimensions; rotation is applied first so width/height match the output.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
if (self.preventRendering)
{
return;
}
// if (overrideInputSize)
// {
// if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero))
// {
// return;
// }
// else
// {
// CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height));
// inputTextureSize = insetRect.size;
// return;
// }
// }
CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex];
originallySuppliedInputSize = rotatedSize;
CGSize scaledSize;
scaledSize.width = rotatedSize.width * _cropRegion.size.width;
scaledSize.height = rotatedSize.height * _cropRegion.size.height;
if (CGSizeEqualToSize(scaledSize, CGSizeZero))
{
inputTextureSize = scaledSize;
}
else if (!CGSizeEqualToSize(inputTextureSize, scaledSize))
{
inputTextureSize = scaledSize;
}
}
#pragma mark -
#pragma mark GPUImageInput
// Rebuilds the 4-vertex texture coordinate table for the current crop region and
// input rotation. Each case maps the quad corners (see per-line comments) to the
// crop rect, flipping/transposing as the rotation mode requires.
- (void)calculateCropTextureCoordinates;
{
CGFloat minX = _cropRegion.origin.x;
CGFloat minY = _cropRegion.origin.y;
CGFloat maxX = CGRectGetMaxX(_cropRegion);
CGFloat maxY = CGRectGetMaxY(_cropRegion);
switch(inputRotation)
{
case kGPUImageNoRotation: // Works
{
cropTextureCoordinates[0] = minX; // 0,0
cropTextureCoordinates[1] = minY;
cropTextureCoordinates[2] = maxX; // 1,0
cropTextureCoordinates[3] = minY;
cropTextureCoordinates[4] = minX; // 0,1
cropTextureCoordinates[5] = maxY;
cropTextureCoordinates[6] = maxX; // 1,1
cropTextureCoordinates[7] = maxY;
}; break;
case kGPUImageRotateLeft: // Fixed
{
cropTextureCoordinates[0] = maxY; // 1,0
cropTextureCoordinates[1] = 1.0 - maxX;
cropTextureCoordinates[2] = maxY; // 1,1
cropTextureCoordinates[3] = 1.0 - minX;
cropTextureCoordinates[4] = minY; // 0,0
cropTextureCoordinates[5] = 1.0 - maxX;
cropTextureCoordinates[6] = minY; // 0,1
cropTextureCoordinates[7] = 1.0 - minX;
}; break;
case kGPUImageRotateRight: // Fixed
{
cropTextureCoordinates[0] = minY; // 0,1
cropTextureCoordinates[1] = 1.0 - minX;
cropTextureCoordinates[2] = minY; // 0,0
cropTextureCoordinates[3] = 1.0 - maxX;
cropTextureCoordinates[4] = maxY; // 1,1
cropTextureCoordinates[5] = 1.0 - minX;
cropTextureCoordinates[6] = maxY; // 1,0
cropTextureCoordinates[7] = 1.0 - maxX;
}; break;
case kGPUImageFlipVertical: // Works for me
{
cropTextureCoordinates[0] = minX; // 0,1
cropTextureCoordinates[1] = maxY;
cropTextureCoordinates[2] = maxX; // 1,1
cropTextureCoordinates[3] = maxY;
cropTextureCoordinates[4] = minX; // 0,0
cropTextureCoordinates[5] = minY;
cropTextureCoordinates[6] = maxX; // 1,0
cropTextureCoordinates[7] = minY;
}; break;
case kGPUImageFlipHorizonal: // Works for me
{
cropTextureCoordinates[0] = maxX; // 1,0
cropTextureCoordinates[1] = minY;
cropTextureCoordinates[2] = minX; // 0,0
cropTextureCoordinates[3] = minY;
cropTextureCoordinates[4] = maxX; // 1,1
cropTextureCoordinates[5] = maxY;
cropTextureCoordinates[6] = minX; // 0,1
cropTextureCoordinates[7] = maxY;
}; break;
case kGPUImageRotate180: // Fixed
{
cropTextureCoordinates[0] = maxX; // 1,1
cropTextureCoordinates[1] = maxY;
cropTextureCoordinates[2] = minX; // 0,1
cropTextureCoordinates[3] = maxY;
cropTextureCoordinates[4] = maxX; // 1,0
cropTextureCoordinates[5] = minY;
cropTextureCoordinates[6] = minX; // 0,0
cropTextureCoordinates[7] = minY;
}; break;
case kGPUImageRotateRightFlipVertical: // Fixed
{
cropTextureCoordinates[0] = minY; // 0,0
cropTextureCoordinates[1] = 1.0 - maxX;
cropTextureCoordinates[2] = minY; // 0,1
cropTextureCoordinates[3] = 1.0 - minX;
cropTextureCoordinates[4] = maxY; // 1,0
cropTextureCoordinates[5] = 1.0 - maxX;
cropTextureCoordinates[6] = maxY; // 1,1
cropTextureCoordinates[7] = 1.0 - minX;
}; break;
case kGPUImageRotateRightFlipHorizontal: // Fixed
{
cropTextureCoordinates[0] = maxY; // 1,1
cropTextureCoordinates[1] = 1.0 - minX;
cropTextureCoordinates[2] = maxY; // 1,0
cropTextureCoordinates[3] = 1.0 - maxX;
cropTextureCoordinates[4] = minY; // 0,1
cropTextureCoordinates[5] = 1.0 - minX;
cropTextureCoordinates[6] = minY; // 0,0
cropTextureCoordinates[7] = 1.0 - maxX;
}; break;
}
}
// Renders the full-screen quad sampling only the cropped texture region, then
// notifies downstream targets.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
static const GLfloat cropSquareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
[self renderToTextureWithVertices:cropSquareVertices textureCoordinates:cropTextureCoordinates];
[self informTargetsAboutNewFrameAtTime:frameTime];
}
#pragma mark -
#pragma mark Accessors
// Asserts the region is fully inside the normalized unit square, then refreshes
// the cached texture coordinates.
- (void)setCropRegion:(CGRect)newValue;
{
NSParameterAssert(newValue.origin.x >= 0 && newValue.origin.x <= 1 &&
newValue.origin.y >= 0 && newValue.origin.y <= 1 &&
newValue.size.width >= 0 && newValue.size.width <= 1 &&
newValue.size.height >= 0 && newValue.size.height <= 1);
_cropRegion = newValue;
[self calculateCropTextureCoordinates];
}
// Rotation changes invalidate the coordinate table, so recompute it.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
[super setInputRotation:newInputRotation atIndex:textureIndex];
[self calculateCropTextureCoordinates];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCropFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,960 |
```objective-c
#import "GPUImageThresholdSketchFilter.h"

// Sobel edge magnitude, thresholded to a binary sketch (edges black on white).
// NOTE(review): the two platform variants apply the threshold differently — the
// iOS shader computes step(threshold, magnitude) and then inverts, while the
// desktop shader inverts the magnitude first and then applies step(threshold, ...).
// The same threshold value therefore does not behave identically on both
// platforms; confirm which semantics are intended before unifying.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float threshold;
uniform float edgeStrength;
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = (length(vec2(h, v)) * edgeStrength);
mag = step(threshold, mag);
mag = 1.0 - mag;
gl_FragColor = vec4(vec3(mag), 1.0);
}
);
#else
NSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform float threshold;
uniform float edgeStrength;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = 1.0 - length(vec2(h, v) * edgeStrength);
mag = step(threshold, mag);
gl_FragColor = vec4(vec3(mag), 1.0);
}
);
#endif
@implementation GPUImageThresholdSketchFilter
#pragma mark -
#pragma mark Initialization and teardown
- (id)init;
{
if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdSketchFragmentShaderString]))
{
return nil;
}
return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 912 |
```objective-c
#import "GPUImageFilter.h"

// Inverts the RGB channels of the input; behavior is defined entirely by the
// fragment shader in the implementation file — no configurable properties.
@interface GPUImageColorInvertFilter : GPUImageFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 24 |
```objective-c
#import "GPUImageMultiplyBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
}
);
#else
NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 base = texture2D(inputImageTexture, textureCoordinate);
vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
}
);
#endif
@implementation GPUImageMultiplyBlendFilter
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageMultiplyBlendFragmentShaderString]))
{
return nil;
}
return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 328 |
```objective-c
// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
// A description of this can be found at his page on the topic:
// path_to_url
// I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders

#import <Foundation/Foundation.h>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#else
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
#endif

@interface GLProgram : NSObject
{
NSMutableArray *attributes; // attribute names, index in array == bound GL attribute slot
NSMutableArray *uniforms; // uniform names queried so far
GLuint program, // GL handles for the linked program and its two shaders
vertShader,
fragShader;
}
@property(readwrite, nonatomic) BOOL initialized; // YES once the program has linked successfully
@property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
@property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;
@property(readwrite, copy, nonatomic) NSString *programLog;
// Compile both shaders from source strings (or files for the other variants).
- (id)initWithVertexShaderString:(NSString *)vShaderString
fragmentShaderString:(NSString *)fShaderString;
- (id)initWithVertexShaderString:(NSString *)vShaderString
fragmentShaderFilename:(NSString *)fShaderFilename;
- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
fragmentShaderFilename:(NSString *)fShaderFilename;
// Register an attribute before -link; its slot is its position in the attributes array.
- (void)addAttribute:(NSString *)attributeName;
- (GLuint)attributeIndex:(NSString *)attributeName;
- (GLuint)uniformIndex:(NSString *)uniformName;
// Link the program; returns NO on failure (check the log properties).
- (BOOL)link;
// Make this program current (glUseProgram).
- (void)use;
- (void)validate;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GLProgram.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 349 |
```objective-c
#import "GPUImageHueBlendFilter.h"

/**
 * Hue blend mode based upon pseudo code from the PDF specification.
 *
 * Output = SetLum(SetSat(overlay, Sat(base)), Lum(base)), composited by overlay
 * alpha. setsat() rescales the color so that its max channel becomes s, its min
 * channel 0, and the mid channel is interpolated proportionally.
 *
 * Fix: in the "b is max, r is mid, g is min" branch of setsat(), the original code
 * zeroed c.r (discarding the mid value it had just computed) instead of zeroing the
 * minimum channel c.g, as the spec's SetSat pseudo code requires (Cmin = 0).
 * Corrected in both the ES and desktop shader variants.
 */
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
highp float lum(lowp vec3 c) {
return dot(c, vec3(0.3, 0.59, 0.11));
}
lowp vec3 clipcolor(lowp vec3 c) {
highp float l = lum(c);
lowp float n = min(min(c.r, c.g), c.b);
lowp float x = max(max(c.r, c.g), c.b);
if (n < 0.0) {
c.r = l + ((c.r - l) * l) / (l - n);
c.g = l + ((c.g - l) * l) / (l - n);
c.b = l + ((c.b - l) * l) / (l - n);
}
if (x > 1.0) {
c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
}
return c;
}
lowp vec3 setlum(lowp vec3 c, highp float l) {
highp float d = l - lum(c);
c = c + vec3(d);
return clipcolor(c);
}
highp float sat(lowp vec3 c) {
lowp float n = min(min(c.r, c.g), c.b);
lowp float x = max(max(c.r, c.g), c.b);
return x - n;
}
lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) {
return ((cmid - cmin) * s) / (cmax - cmin);
}
lowp vec3 setsat(lowp vec3 c, highp float s) {
if (c.r > c.g) {
if (c.r > c.b) {
if (c.g > c.b) {
/* g is mid, b is min */
c.g = mid(c.b, c.g, c.r, s);
c.b = 0.0;
} else {
/* b is mid, g is min */
c.b = mid(c.g, c.b, c.r, s);
c.g = 0.0;
}
c.r = s;
} else {
/* b is max, r is mid, g is min */
c.r = mid(c.g, c.r, c.b, s);
c.b = s;
c.g = 0.0;
}
} else if (c.r > c.b) {
/* g is max, r is mid, b is min */
c.r = mid(c.b, c.r, c.g, s);
c.g = s;
c.b = 0.0;
} else if (c.g > c.b) {
/* g is max, b is mid, r is min */
c.b = mid(c.r, c.b, c.g, s);
c.g = s;
c.r = 0.0;
} else if (c.b > c.g) {
/* b is max, g is mid, r is min */
c.g = mid(c.r, c.g, c.b, s);
c.b = s;
c.r = 0.0;
} else {
c = vec3(0.0);
}
return c;
}
void main()
{
highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
}
);
#else
// Desktop OpenGL variant: identical logic, without precision qualifiers.
NSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
float lum(vec3 c) {
return dot(c, vec3(0.3, 0.59, 0.11));
}
vec3 clipcolor(vec3 c) {
float l = lum(c);
float n = min(min(c.r, c.g), c.b);
float x = max(max(c.r, c.g), c.b);
if (n < 0.0) {
c.r = l + ((c.r - l) * l) / (l - n);
c.g = l + ((c.g - l) * l) / (l - n);
c.b = l + ((c.b - l) * l) / (l - n);
}
if (x > 1.0) {
c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
}
return c;
}
vec3 setlum(vec3 c, float l) {
float d = l - lum(c);
c = c + vec3(d);
return clipcolor(c);
}
float sat(vec3 c) {
float n = min(min(c.r, c.g), c.b);
float x = max(max(c.r, c.g), c.b);
return x - n;
}
float mid(float cmin, float cmid, float cmax, float s) {
return ((cmid - cmin) * s) / (cmax - cmin);
}
vec3 setsat(vec3 c, float s) {
if (c.r > c.g) {
if (c.r > c.b) {
if (c.g > c.b) {
/* g is mid, b is min */
c.g = mid(c.b, c.g, c.r, s);
c.b = 0.0;
} else {
/* b is mid, g is min */
c.b = mid(c.g, c.b, c.r, s);
c.g = 0.0;
}
c.r = s;
} else {
/* b is max, r is mid, g is min */
c.r = mid(c.g, c.r, c.b, s);
c.b = s;
c.g = 0.0;
}
} else if (c.r > c.b) {
/* g is max, r is mid, b is min */
c.r = mid(c.b, c.r, c.g, s);
c.g = s;
c.b = 0.0;
} else if (c.g > c.b) {
/* g is max, b is mid, r is min */
c.b = mid(c.r, c.b, c.g, s);
c.g = s;
c.r = 0.0;
} else if (c.b > c.g) {
/* b is max, g is mid, r is min */
c.g = mid(c.r, c.g, c.b, s);
c.b = s;
c.r = 0.0;
} else {
c = vec3(0.0);
}
return c;
}
void main()
{
vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
}
);
#endif
@implementation GPUImageHueBlendFilter
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageHueBlendFragmentShaderString]))
{
return nil;
}
return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,917 |
```objective-c
#import "GPUImageSobelEdgeDetectionFilter.h"

// Emits per-pixel X/Y intensity derivatives (rather than edge magnitude) by
// swapping the Sobel superclass's fragment shader; no additional properties.
@interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 30 |
```objective-c
#import "GPUImageTwoInputFilter.h"

// Linear cross-fade between the two inputs, controlled by the mix property.
@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
{
GLint mixUniform; // cached shader uniform location for the mix factor
}
// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level
@property(readwrite, nonatomic) CGFloat mix;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 84 |
```objective-c
#import "GPUImageHighlightShadowFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (precision qualifiers required).
// Independently lifts shadow detail and recovers highlight detail around the
// pixel's luminance. With the defaults set in -init (shadows = 0.0,
// highlights = 1.0) both correction terms reduce to 0, leaving the image
// unchanged.
// NOTE(review): the result line divides source.rgb by luminance; a pure black
// pixel (luminance == 0) divides by zero — confirm this is acceptable upstream.
NSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 varying highp vec2 textureCoordinate;
 uniform lowp float shadows;
 uniform lowp float highlights;
 const mediump vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);
 void main()
 {
     lowp vec4 source = texture2D(inputImageTexture, textureCoordinate);
     mediump float luminance = dot(source.rgb, luminanceWeighting);
     mediump float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);
     mediump float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);
     lowp vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));
     gl_FragColor = vec4(result.rgb, source.a);
 }
);
#else
// Desktop OpenGL variant: identical math, no precision qualifiers.
NSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 varying vec2 textureCoordinate;
 uniform float shadows;
 uniform float highlights;
 const vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);
 void main()
 {
     vec4 source = texture2D(inputImageTexture, textureCoordinate);
     float luminance = dot(source.rgb, luminanceWeighting);
     float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);
     float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);
     vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));
     gl_FragColor = vec4(result.rgb, source.a);
 }
);
#endif
@implementation GPUImageHighlightShadowFilter

@synthesize shadows = _shadows;
@synthesize highlights = _highlights;

#pragma mark -
#pragma mark Initialization and teardown

/// Wires up the shadow/highlight uniforms and applies the identity defaults
/// (shadows 0.0, highlights 1.0 — no adjustment).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageHighlightShadowFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    shadowsUniform = [filterProgram uniformIndex:@"shadows"];
    highlightsUniform = [filterProgram uniformIndex:@"highlights"];

    self.shadows = 0.0;
    self.highlights = 1.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

- (void)setShadows:(CGFloat)newValue;
{
    _shadows = newValue;
    [self setFloat:newValue forUniform:shadowsUniform program:filterProgram];
}

- (void)setHighlights:(CGFloat)newValue;
{
    _highlights = newValue;
    [self setFloat:newValue forUniform:highlightsUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 873 |
```objective-c
// This needs a little more work, it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working
#import "GPUImageTwoInputFilter.h"
#import "GPUImagePicture.h"
@interface GPUImageMosaicFilter : GPUImageTwoInputFilter {
    GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform;  // cached shader uniform handles
    GPUImagePicture *pic;   // retains the tile-sheet image fed in as the second input
}
// This filter takes an input tileset, the tiles must ascend in luminance
// It looks at the input image and replaces each display tile with an input tile
// according to the luminance of that tile. The idea was to replicate the ASCII
// video filters seen in other apps, but the tileset can be anything.

// Normalized (0..1) size of one tile inside the tile-sheet texture.
@property(readwrite, nonatomic) CGSize inputTileSize;
// Number of tiles in the sheet (luminance is quantized into this many steps).
@property(readwrite, nonatomic) float numTiles;
// Normalized (0..1) size of each mosaic cell drawn on the output.
@property(readwrite, nonatomic) CGSize displayTileSize;
// When YES, the chosen tile is tinted by the underlying image color.
@property(readwrite, nonatomic) BOOL colorOn;
// Name of the tile-sheet image to load from the app bundle.
@property(readwrite, nonatomic, copy) NSString *tileSet;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 222 |
```objective-c
#import "GPUImageFilter.h"
/**
* Created by Alaric Cole
* Allows adjustment of color temperature in terms of what an image was effectively shot in. This means higher Kelvin values will warm the image, while lower values will cool it.
*/
@interface GPUImageWhiteBalanceFilter : GPUImageFilter
{
    GLint temperatureUniform, tintUniform;  // cached shader uniform handles
}
//choose color temperature, in degrees Kelvin
@property(readwrite, nonatomic) CGFloat temperature;
//adjust tint to compensate
@property(readwrite, nonatomic) CGFloat tint;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 109 |
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
// 3x3 neighborhood filter (sampling coordinates come from the superclass's
// vertex shader); the inclusion logic itself lives in the .m's fragment shader.
@interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 33 |
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
// For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
// This extends out dark features, and can be used for abstraction of color images.
@interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter
// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)erosionRadius;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 124 |
```objective-c
#import "GPUImageFilter.h"
// Applies Photoshop-style tone curves (per-channel and RGB composite),
// loadable from .acv curve files.
@interface GPUImageToneCurveFilter : GPUImageFilter

// Control points are interpolated into a spline per channel (see the curve
// calculation methods below).
@property(readwrite, nonatomic, copy) NSArray *redControlPoints;
@property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
@property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
@property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;

// Initialization and teardown
- (id)initWithACVData:(NSData*)data;
- (id)initWithACV:(NSString*)curveFilename;
- (id)initWithACVURL:(NSURL*)curveFileURL;

// This lets you set all three red, green, and blue tone curves at once.
// NOTE: Deprecated this function because this effect can be accomplished
// using the rgbComposite channel rather than setting all 3 R, G, and B channels.
- (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;

- (void)setPointsWithACV:(NSString*)curveFilename;
- (void)setPointsWithACVURL:(NSURL*)curveFileURL;

// Curve calculation
- (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;
- (NSMutableArray *)splineCurve:(NSArray *)points;
- (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;
- (void)updateToneCurveTexture;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 273 |
```objective-c
#import "GPUImageVignetteFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant. Blends toward vignetteColor based on the pixel's distance
// from vignetteCenter, ramping smoothly between vignetteStart and vignetteEnd.
NSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 varying highp vec2 textureCoordinate;
 uniform lowp vec2 vignetteCenter;
 uniform lowp vec3 vignetteColor;
 uniform highp float vignetteStart;
 uniform highp float vignetteEnd;
 void main()
 {
     lowp vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate);
     lowp float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y));
     lowp float percent = smoothstep(vignetteStart, vignetteEnd, d);
     gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a);
 }
);
#else
// Desktop OpenGL variant: identical math, no precision qualifiers.
NSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 varying vec2 textureCoordinate;
 uniform vec2 vignetteCenter;
 uniform vec3 vignetteColor;
 uniform float vignetteStart;
 uniform float vignetteEnd;
 void main()
 {
     vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate);
     float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y));
     float percent = smoothstep(vignetteStart, vignetteEnd, d);
     gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a);
 }
);
#endif
@implementation GPUImageVignetteFilter

@synthesize vignetteCenter = _vignetteCenter;
@synthesize vignetteColor = _vignetteColor;
@synthesize vignetteStart = _vignetteStart;
@synthesize vignetteEnd = _vignetteEnd;

#pragma mark -
#pragma mark Initialization and teardown

/// Caches the four vignette uniforms, then applies the stock look:
/// black vignette centered in the frame, fading in from 0.3 to 0.75.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageVignetteFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    vignetteCenterUniform = [filterProgram uniformIndex:@"vignetteCenter"];
    vignetteColorUniform = [filterProgram uniformIndex:@"vignetteColor"];
    vignetteStartUniform = [filterProgram uniformIndex:@"vignetteStart"];
    vignetteEndUniform = [filterProgram uniformIndex:@"vignetteEnd"];

    self.vignetteCenter = CGPointMake(0.5f, 0.5f);
    self.vignetteColor = (GPUVector3){ 0.0f, 0.0f, 0.0f };
    self.vignetteStart = 0.3;
    self.vignetteEnd = 0.75;

    return self;
}

#pragma mark -
#pragma mark Accessors

- (void)setVignetteCenter:(CGPoint)newValue
{
    _vignetteCenter = newValue;
    [self setPoint:_vignetteCenter forUniform:vignetteCenterUniform program:filterProgram];
}

- (void)setVignetteColor:(GPUVector3)newValue
{
    _vignetteColor = newValue;
    [self setVec3:_vignetteColor forUniform:vignetteColorUniform program:filterProgram];
}

- (void)setVignetteStart:(CGFloat)newValue;
{
    _vignetteStart = newValue;
    [self setFloat:newValue forUniform:vignetteStartUniform program:filterProgram];
}

- (void)setVignetteEnd:(CGFloat)newValue;
{
    _vignetteEnd = newValue;
    [self setFloat:newValue forUniform:vignetteEndUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 779 |
```objective-c
#import "GPUImageGaussianBlurPositionFilter.h"
// Vertex shader: precomputes the 9 blur sample coordinates (centered on the
// current texel, stepping by texelWidthOffset/texelHeightOffset) so the
// fragment stage can sample without computing coordinates per-fragment.
NSString *const kGPUImageGaussianBlurPositionVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 const int GAUSSIAN_SAMPLES = 9;
 uniform float texelWidthOffset;
 uniform float texelHeightOffset;
 varying vec2 textureCoordinate;
 varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
     // Calculate the positions for the blur
     int multiplier = 0;
     vec2 blurStep;
     vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);
     for (int i = 0; i < GAUSSIAN_SAMPLES; i++) {
         multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));
         // Blur in x (horizontal)
         blurStep = float(multiplier) * singleStepOffset;
         blurCoordinates[i] = inputTextureCoordinate.xy + blurStep;
     }
 }
);
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant. Applies the 9-tap weighted blur only to pixels within
// blurRadius of blurCenter (distance measured in aspect-corrected coordinates,
// so the blurred region is circular); everything else passes through unchanged.
NSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 const lowp int GAUSSIAN_SAMPLES = 9;
 varying highp vec2 textureCoordinate;
 varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];
 uniform highp float aspectRatio;
 uniform lowp vec2 blurCenter;
 uniform highp float blurRadius;
 void main() {
     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
     highp float dist = distance(blurCenter, textureCoordinateToUse);
     if (dist < blurRadius)
     {
         lowp vec4 sum = vec4(0.0);
         sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05;
         sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09;
         sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12;
         sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15;
         sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18;
         sum += texture2D(inputImageTexture, blurCoordinates[5]) * 0.15;
         sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12;
         sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09;
         sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05;
         gl_FragColor = sum;
     }
     else
     {
         gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
     }
 }
);
#else
// Desktop OpenGL variant: identical math, no precision qualifiers.
NSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 const int GAUSSIAN_SAMPLES = 9;
 varying vec2 textureCoordinate;
 varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
 uniform float aspectRatio;
 uniform vec2 blurCenter;
 uniform float blurRadius;
 void main()
 {
     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
     float dist = distance(blurCenter, textureCoordinateToUse);
     if (dist < blurRadius)
     {
         vec4 sum = vec4(0.0);
         sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05;
         sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09;
         sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12;
         sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15;
         sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18;
         sum += texture2D(inputImageTexture, blurCoordinates[5]) * 0.15;
         sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12;
         sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09;
         sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05;
         gl_FragColor = sum;
     }
     else
     {
         gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
     }
 }
);
#endif
// Private API: aspectRatio feeds the shader's distance test so the blurred
// region stays circular on non-square inputs.
@interface GPUImageGaussianBlurPositionFilter ()
- (void)adjustAspectRatio;
@property (readwrite, nonatomic) CGFloat aspectRatio;
@end
@implementation GPUImageGaussianBlurPositionFilter

@synthesize blurSize = _blurSize;
@synthesize blurCenter = _blurCenter;
@synthesize aspectRatio = _aspectRatio;

// Designated initializer: any nil shader argument falls back to the default
// position-blur shaders declared above. Note the position uniforms live in the
// SECOND stage's program only.
- (id) initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString
             firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString
              secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString
            secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString {
    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString ? firstStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString
                              firstStageFragmentShaderFromString:firstStageFragmentShaderString ? firstStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString
                               secondStageVertexShaderFromString:secondStageVertexShaderString ? secondStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString
                             secondStageFragmentShaderFromString:secondStageFragmentShaderString ? secondStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString])) {
        return nil;
    }

    aspectRatioUniform = [secondFilterProgram uniformIndex:@"aspectRatio"];
    blurCenterUniform = [secondFilterProgram uniformIndex:@"blurCenter"];
    blurRadiusUniform = [secondFilterProgram uniformIndex:@"blurRadius"];

    // Defaults: full-strength blur over a radius-1.0 circle at the frame center.
    self.blurSize = 1.0;
    self.blurRadius = 1.0;
    self.blurCenter = CGPointMake(0.5, 0.5);

    return self;
}

- (id)init;
{
    return [self initWithFirstStageVertexShaderFromString:nil
                       firstStageFragmentShaderFromString:nil
                        secondStageVertexShaderFromString:nil
                      secondStageFragmentShaderFromString:nil];
}

// Keeps the shader's aspectRatio uniform in sync with the input dimensions,
// accounting for rotations that swap width and height.
- (void)adjustAspectRatio;
{
    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
    {
        [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
    }
    else
    {
        [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
    }
}

- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    [super forceProcessingAtSize:frameSize];
    [self adjustAspectRatio];
}

// Recomputes the aspect ratio only when the effective input size actually changed.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    CGSize oldInputSize = inputTextureSize;
    [super setInputSize:newSize atIndex:textureIndex];

    if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
    {
        [self adjustAspectRatio];
    }
}

// Rotation changes both the uniform-space blur center and the aspect ratio.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self setBlurCenter:self.blurCenter];
    [self adjustAspectRatio];
}

#pragma mark -
#pragma mark Accessors

// blurSize maps directly onto the superclass's texel spacing in both passes,
// so the filter has to be re-set-up for the current FBO size.
- (void)setBlurSize:(CGFloat)newValue;
{
    _blurSize = newValue;
    _verticalTexelSpacing = _blurSize;
    _horizontalTexelSpacing = _blurSize;
    [self setupFilterForSize:[self sizeOfFBO]];
}

// Stores the caller's (unrotated) center, but uploads the rotation-corrected point.
- (void) setBlurCenter:(CGPoint)blurCenter;
{
    _blurCenter = blurCenter;
    CGPoint rotatedPoint = [self rotatedPoint:blurCenter forRotation:inputRotation];
    [self setPoint:rotatedPoint forUniform:blurCenterUniform program:secondFilterProgram];
}

- (void) setBlurRadius:(CGFloat)blurRadius;
{
    _blurRadius = blurRadius;
    [self setFloat:_blurRadius forUniform:blurRadiusUniform program:secondFilterProgram];
}

- (void) setAspectRatio:(CGFloat)newValue;
{
    _aspectRatio = newValue;
    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:secondFilterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,855 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImagePicture;
/** A photo filter based on a Photoshop action by Amatorka
    (original credit link lost in this copy — see upstream GPUImage sources)
*/
// Note: If you want to use this effect you have to add lookup_amatorka.png
// from Resources folder to your application bundle.
@interface GPUImageAmatorkaFilter : GPUImageFilterGroup
{
    GPUImagePicture *lookupImageSource;  // retains the lookup-table image for the group's lifetime
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 92 |
```objective-c
#import "GPUImageOpeningFilter.h"
#import "GPUImageErosionFilter.h"
#import "GPUImageDilationFilter.h"
@implementation GPUImageOpeningFilter

@synthesize verticalTexelSpacing = _verticalTexelSpacing;
@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;

/// Morphological opening with the default radius of 1.
- (id)init;
{
    return [self initWithRadius:1];
}

/// Builds the two-stage group: erosion feeding into dilation, both with the
/// same radius (classic morphological opening).
- (id)initWithRadius:(NSUInteger)radius;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // First pass: erosion
    erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];

    // Second pass: dilation
    dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];

    [erosionFilter addTarget:dilationFilter];

    self.initialFilters = @[erosionFilter];
    self.terminalFilter = dilationFilter;

    return self;
}

// Texel-spacing setters fan the value out to both stages.

- (void)setVerticalTexelSpacing:(CGFloat)newValue;
{
    _verticalTexelSpacing = newValue;
    erosionFilter.verticalTexelSpacing = newValue;
    dilationFilter.verticalTexelSpacing = newValue;
}

- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
{
    _horizontalTexelSpacing = newValue;
    erosionFilter.horizontalTexelSpacing = newValue;
    dilationFilter.horizontalTexelSpacing = newValue;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 320 |
```objective-c
#import "GPUImageGaussianBlurFilter.h"
/** A hardware-accelerated box blur of an image
    (reuses the Gaussian blur plumbing with box weights — shaders in the .m)
*/
@interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 41 |
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageMonochromeFilter : GPUImageFilter
{
    GLint intensityUniform, filterColorUniform;  // cached shader uniform handles
}
// Strength of the monochrome effect (semantics defined by the .m's shader).
@property(readwrite, nonatomic) CGFloat intensity;
// The color the image is toned with.
@property(readwrite, nonatomic) GPUVector4 color;

// Convenience setter for the RGB components of color.
- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 76 |
```objective-c
//
// GPUImageMosaicFilter.m
#import "GPUImageMosaicFilter.h"
#import "GPUImagePicture.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant. Quantizes the (inverted) luminance of each display cell
// into numTiles steps, then samples the matching tile out of the tile-sheet
// texture (inputImageTexture); colorOn tints the tile by the cell's color.
NSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING
(
 precision highp float;
 varying vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform vec2 inputTileSize;
 uniform vec2 displayTileSize;
 uniform float numTiles;
 uniform int colorOn;
 void main()
 {
     // Snap to the top-left corner of the current display cell.
     vec2 xy = textureCoordinate;
     xy = xy - mod(xy, displayTileSize);
     vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0);
     vec4 inputColor = texture2D(inputImageTexture2, xy);
     float lum = dot(inputColor,lumcoeff);
     lum = 1.0 - lum;
     // Quantize luminance and map it to a (row, column) in the tile sheet.
     float stepsize = 1.0 / numTiles;
     float lumStep = (lum - mod(lum, stepsize)) / stepsize;
     float rowStep = 1.0 / inputTileSize.x;
     float x = mod(lumStep, rowStep);
     float y = floor(lumStep / rowStep);
     vec2 startCoord = vec2(float(x) * inputTileSize.x, float(y) * inputTileSize.y);
     vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize));
     vec4 color = texture2D(inputImageTexture, finalCoord);
     if (colorOn == 1) {
         color = color * inputColor;
     }
     gl_FragColor = color;
 }
);
#else
// Desktop OpenGL variant: identical math, no precision qualifiers.
NSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform vec2 inputTileSize;
 uniform vec2 displayTileSize;
 uniform float numTiles;
 uniform int colorOn;
 void main()
 {
     vec2 xy = textureCoordinate;
     xy = xy - mod(xy, displayTileSize);
     vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0);
     vec4 inputColor = texture2D(inputImageTexture2, xy);
     float lum = dot(inputColor,lumcoeff);
     lum = 1.0 - lum;
     float stepsize = 1.0 / numTiles;
     float lumStep = (lum - mod(lum, stepsize)) / stepsize;
     float rowStep = 1.0 / inputTileSize.x;
     float x = mod(lumStep, rowStep);
     float y = floor(lumStep / rowStep);
     vec2 startCoord = vec2(float(x) * inputTileSize.x, float(y) * inputTileSize.y);
     vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize));
     vec4 color = texture2D(inputImageTexture, finalCoord);
     if (colorOn == 1) {
         color = color * inputColor;
     }
     gl_FragColor = color;
 }
);
#endif
@implementation GPUImageMosaicFilter

@synthesize inputTileSize = _inputTileSize, numTiles = _numTiles, displayTileSize = _displayTileSize, colorOn = _colorOn;
@synthesize tileSet = _tileSet;

#pragma mark -
#pragma mark Initialization and teardown

/// Caches the shader uniform handles and applies the defaults: an 8x8 tile
/// sheet (normalized tile size 0.125), 64 luminance steps, 40x40 display
/// cells (0.025), with color tinting on.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMosaicFragmentShaderString]))
    {
        return nil;
    }

    inputTileSizeUniform = [filterProgram uniformIndex:@"inputTileSize"];
    displayTileSizeUniform = [filterProgram uniformIndex:@"displayTileSize"];
    numTilesUniform = [filterProgram uniformIndex:@"numTiles"];
    colorOnUniform = [filterProgram uniformIndex:@"colorOn"];

    CGSize its = CGSizeMake(0.125, 0.125);
    CGSize dts = CGSizeMake(0.025, 0.025);
    [self setDisplayTileSize:dts];
    [self setInputTileSize:its];
    [self setNumTiles:64.0];
    [self setColorOn:YES];
    //[self setTileSet:@"squares.png"];

    return self;
}

#pragma mark -
#pragma mark Accessors

// FIX: previously wrote the uniform with a raw glUniform1i() call, which
// neither stored the value in _colorOn nor guaranteed the correct GL context
// and program were current; route through the filter's uniform machinery.
- (void)setColorOn:(BOOL)yes
{
    _colorOn = yes;
    [self setInteger:(yes ? 1 : 0) forUniform:colorOnUniform program:filterProgram];
}

- (void)setNumTiles:(float)numTiles
{
    _numTiles = numTiles;
    [self setFloat:_numTiles forUniformName:@"numTiles"];
}

// FIX: the clamping used to write into _inputTileSize and was then overwritten
// by the raw value on the final assignment, so out-of-range sizes reached the
// shader. Clamp a local copy into [0.0, 1.0] and store that.
- (void)setInputTileSize:(CGSize)inputTileSize
{
    CGSize clampedSize = inputTileSize;
    clampedSize.width = MIN(MAX(clampedSize.width, 0.0), 1.0);
    clampedSize.height = MIN(MAX(clampedSize.height, 0.0), 1.0);

    _inputTileSize = clampedSize;
    [self setSize:_inputTileSize forUniform:inputTileSizeUniform program:filterProgram];
}

// FIX: same dead-clamping bug as setInputTileSize:, fixed the same way.
-(void)setDisplayTileSize:(CGSize)displayTileSize
{
    CGSize clampedSize = displayTileSize;
    clampedSize.width = MIN(MAX(clampedSize.width, 0.0), 1.0);
    clampedSize.height = MIN(MAX(clampedSize.height, 0.0), 1.0);

    _displayTileSize = clampedSize;
    [self setSize:_displayTileSize forUniform:displayTileSizeUniform program:filterProgram];
}

// Loads the named tile-sheet image from the bundle and feeds it in as the
// second input texture.
// FIX: previously never stored _tileSet, so the copy property's getter always
// returned nil.
-(void)setTileSet:(NSString *)tileSet
{
    _tileSet = [tileSet copy];

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    UIImage *img = [UIImage imageNamed:tileSet];
#else
    NSImage *img = [NSImage imageNamed:tileSet];
#endif
    pic = [[GPUImagePicture alloc] initWithImage:img smoothlyScaleOutput:YES];
    [pic addTarget:self];
    [pic processImage];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,404 |
```objective-c
#import "GPUImageSphereRefractionFilter.h"
// Glass-sphere variant of the sphere refraction effect; behavior is defined
// entirely by shaders supplied in the .m.
@interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 27 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Difference blend of two inputs; the blend math lives in the .m's fragment shader.
@interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 27 |
```objective-c
#import "GPUImageFilter.h"
// Renders histogram data into a displayable image (shader in the .m).
@interface GPUImageHistogramGenerator : GPUImageFilter
{
    GLint backgroundColorUniform;  // cached handle for the shader's background color
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 27 |
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageLuminanceRangeFilter : GPUImageFilter
{
    GLint rangeReductionUniform;  // cached shader uniform handle
}
/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
*/
@property(readwrite, nonatomic) CGFloat rangeReductionFactor;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 73 |
```objective-c
#import "GPUImageMotionBlurFilter.h"
// Override vertex shader to remove dependent texture reads
// Precomputes 9 sample coordinates along directionalTexelStep (4 back, center,
// 4 forward), defining the motion-blur sampling line for the fragment stage.
NSString *const kGPUImageTiltedTexelSamplingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 uniform vec2 directionalTexelStep;
 varying vec2 textureCoordinate;
 varying vec2 oneStepBackTextureCoordinate;
 varying vec2 twoStepsBackTextureCoordinate;
 varying vec2 threeStepsBackTextureCoordinate;
 varying vec2 fourStepsBackTextureCoordinate;
 varying vec2 oneStepForwardTextureCoordinate;
 varying vec2 twoStepsForwardTextureCoordinate;
 varying vec2 threeStepsForwardTextureCoordinate;
 varying vec2 fourStepsForwardTextureCoordinate;
 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
     oneStepBackTextureCoordinate = inputTextureCoordinate.xy - directionalTexelStep;
     twoStepsBackTextureCoordinate = inputTextureCoordinate.xy - 2.0 * directionalTexelStep;
     threeStepsBackTextureCoordinate = inputTextureCoordinate.xy - 3.0 * directionalTexelStep;
     fourStepsBackTextureCoordinate = inputTextureCoordinate.xy - 4.0 * directionalTexelStep;
     oneStepForwardTextureCoordinate = inputTextureCoordinate.xy + directionalTexelStep;
     twoStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 2.0 * directionalTexelStep;
     threeStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 3.0 * directionalTexelStep;
     fourStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 4.0 * directionalTexelStep;
 }
);
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant. 9-tap weighted average along the precomputed sampling
// line; weights (0.18 center, tapering 0.15/0.12/0.09/0.05) sum to 1.0.
NSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING
(
 precision highp float;
 uniform sampler2D inputImageTexture;
 varying vec2 textureCoordinate;
 varying vec2 oneStepBackTextureCoordinate;
 varying vec2 twoStepsBackTextureCoordinate;
 varying vec2 threeStepsBackTextureCoordinate;
 varying vec2 fourStepsBackTextureCoordinate;
 varying vec2 oneStepForwardTextureCoordinate;
 varying vec2 twoStepsForwardTextureCoordinate;
 varying vec2 threeStepsForwardTextureCoordinate;
 varying vec2 fourStepsForwardTextureCoordinate;
 void main()
 {
     // Box weights
//     lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111;
     lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15;
     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.12;
     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09;
     fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05;
     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15;
     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.12;
     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09;
     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05;
     gl_FragColor = fragmentColor;
 }
);
#else
// Desktop OpenGL variant: identical math, no precision qualifiers.
NSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 varying vec2 textureCoordinate;
 varying vec2 oneStepBackTextureCoordinate;
 varying vec2 twoStepsBackTextureCoordinate;
 varying vec2 threeStepsBackTextureCoordinate;
 varying vec2 fourStepsBackTextureCoordinate;
 varying vec2 oneStepForwardTextureCoordinate;
 varying vec2 twoStepsForwardTextureCoordinate;
 varying vec2 threeStepsForwardTextureCoordinate;
 varying vec2 fourStepsForwardTextureCoordinate;
 void main()
 {
     // Box weights
//     vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111;
//     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111;
     vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15;
     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.12;
     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09;
     fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05;
     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15;
     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.12;
     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09;
     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05;
     gl_FragColor = fragmentColor;
 }
);
#endif
// Class extension: private state for driving the motion blur shader.
@interface GPUImageMotionBlurFilter()
{
    GLint directionalTexelStepUniform; // handle for the "directionalTexelStep" shader uniform
}
// Recomputes the per-sample texel step from blurSize/blurAngle and the input size.
- (void)recalculateTexelOffsets;
@end
@implementation GPUImageMotionBlurFilter
@synthesize blurSize = _blurSize;
@synthesize blurAngle = _blurAngle;
#pragma mark -
#pragma mark Initialization and teardown
// Builds the filter from the tilted-texel-sampling vertex shader and the
// motion blur fragment shader, then seeds the default blur parameters.
- (id)init;
{
    if (!(self = [super initWithVertexShaderFromString:kGPUImageTiltedTexelSamplingVertexShaderString fragmentShaderFromString:kGPUImageMotionBlurFragmentShaderString]))
    {
        return nil;
    }
    directionalTexelStepUniform = [filterProgram uniformIndex:@"directionalTexelStep"];
    self.blurSize = 2.5;
    self.blurAngle = 0.0;
    return self;
}
// Recomputes the texel offsets only when the effective input size actually
// changes, since the normalized step depends on the texture dimensions.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    CGSize oldInputSize = inputTextureSize;
    [super setInputSize:newSize atIndex:textureIndex];
    if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
    {
        [self recalculateTexelOffsets];
    }
}
// Converts blurAngle (degrees) and blurSize (texels) into a normalized
// per-sample step vector for the shader, compensating for the image aspect
// ratio and for rotations that swap width and height.
- (void)recalculateTexelOffsets;
{
    CGFloat aspectRatio = 1.0;
    CGPoint texelOffsets;
    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
    {
        aspectRatio = (inputTextureSize.width / inputTextureSize.height);
        texelOffsets.x = _blurSize * sin(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.height;
        texelOffsets.y = _blurSize * cos(_blurAngle * M_PI / 180.0) / inputTextureSize.height;
    }
    else
    {
        aspectRatio = (inputTextureSize.height / inputTextureSize.width);
        texelOffsets.x = _blurSize * cos(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.width;
        texelOffsets.y = _blurSize * sin(_blurAngle * M_PI / 180.0) / inputTextureSize.width;
    }
    [self setPoint:texelOffsets forUniform:directionalTexelStepUniform program:filterProgram];
}
#pragma mark -
#pragma mark Accessors
// The step vector is expressed in rotated coordinates, so it must be rebuilt
// whenever the input rotation changes.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self recalculateTexelOffsets];
}
- (void)setBlurAngle:(CGFloat)newValue;
{
    _blurAngle = newValue;
    [self recalculateTexelOffsets];
}
- (void)setBlurSize:(CGFloat)newValue;
{
    _blurSize = newValue;
    [self recalculateTexelOffsets];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,080 |
```objective-c
#import "GPUImageOutput.h"
#import "GPUImageFilter.h"
// Chains several filters so they can be used as a single filter: incoming
// frames are forwarded to `initialFilters`, and the group's output is taken
// from `terminalFilter`.
@interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput>
{
    NSMutableArray *filters;
    BOOL isEndProcessing;
}
// The last filter in the chain; its output is the group's output.
@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *terminalFilter;
// Filters that receive the group's input frames directly.
@property(readwrite, nonatomic, strong) NSArray *initialFilters;
// A filter whose updates should not be propagated back into the group.
@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *inputFilterToIgnoreForUpdates;
// Filter management
- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;
- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;
- (NSUInteger)filterCount;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFilterGroup.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 151 |
```objective-c
#import "GPUImageStretchDistortionFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Stretch distortion fragment shader (OpenGL ES variant; precision qualifiers
// required). Coordinates are remapped into a [-1, 1] space centered on
// `center`, attenuated, and mapped back to texture space before sampling.
NSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp vec2 center;
void main()
{
highp vec2 normCoord = 2.0 * textureCoordinate - 1.0;
highp vec2 normCenter = 2.0 * center - 1.0;
normCoord -= normCenter;
mediump vec2 s = sign(normCoord);
normCoord = abs(normCoord);
// Near the center (below 0.25) the sampled distance is halved, magnifying
// that region; the smoothstep term blends back to unchanged sampling by 0.5.
normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord;
normCoord = s * normCoord;
normCoord += normCenter;
mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
}
);
#else
// Desktop OpenGL variant: identical algorithm without precision qualifiers.
NSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec2 center;
void main()
{
vec2 normCoord = 2.0 * textureCoordinate - 1.0;
vec2 normCenter = 2.0 * center - 1.0;
normCoord -= normCenter;
vec2 s = sign(normCoord);
normCoord = abs(normCoord);
normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord;
normCoord = s * normCoord;
normCoord += normCenter;
vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse);
}
);
#endif
@implementation GPUImageStretchDistortionFilter
@synthesize center = _center;
#pragma mark -
#pragma mark Initialization and teardown
// Compiles the stretch distortion program and defaults the distortion center
// to the middle of the image.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageStretchDistortionFragmentShaderString]))
    {
        return nil;
    }
    centerUniform = [filterProgram uniformIndex:@"center"];
    self.center = CGPointMake(0.5, 0.5);
    return self;
}
#pragma mark -
#pragma mark Accessors
// Re-applies the center after a rotation change, since the uniform must be
// expressed in the rotated coordinate space.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self setCenter:self.center];
}
// Stores the center, then uploads its rotated equivalent to the shader.
- (void)setCenter:(CGPoint)newValue;
{
    _center = newValue;
    CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 646 |
```objective-c
#import "GPUImageFilter.h"
// Selects which channel(s) the histogram is computed over.
typedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUImageHistogramRGB, kGPUImageHistogramLuminance} GPUImageHistogramType;
@interface GPUImageHistogramFilter : GPUImageFilter
{
    GPUImageHistogramType histogramType;
    GLubyte *vertexSamplingCoordinates;
    // NOTE(review): presumably the additional render passes needed for the
    // RGB histogram mode — confirm against the implementation.
    GLProgram *secondFilterProgram, *thirdFilterProgram;
    GLint secondFilterPositionAttribute, thirdFilterPositionAttribute;
}
// Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1.
@property(readwrite, nonatomic) NSUInteger downsamplingFactor;
// Initialization and teardown
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
- (void)initializeSecondaryAttributes;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 175 |
```objective-c
#import "GPUImageFilter.h"
// Sphere refraction distortion filter. The uniform handles below back the
// center/radius/refractiveIndex properties plus the image aspect ratio.
@interface GPUImageSphereRefractionFilter : GPUImageFilter
{
    GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;
}
/// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
@property(readwrite, nonatomic) CGFloat radius;
/// The index of refraction for the sphere, with a default of 0.71
@property(readwrite, nonatomic) CGFloat refractiveIndex;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 141 |
```objective-c
#import "GPUImageShiTomasiFeatureDetectionFilter.h"
@implementation GPUImageShiTomasiFeatureDetectionFilter
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Shi-Tomasi cornerness shader (OpenGL ES variant). Expects a packed
// derivative texture: Ix^2 in r, Iy^2 in g, and Ix*Iy rescaled into [0, 1]
// in b — hence the (* 2.0 - 1.0) unpacking of the z element below.
NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float sensitivity;
void main()
{
mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
mediump float derivativeDifference = derivativeElements.x - derivativeElements.y;
mediump float zElement = (derivativeElements.z * 2.0) - 1.0;
// R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)
mediump float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);
gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
}
);
#else
// Desktop OpenGL variant: same computation without precision qualifiers.
NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float sensitivity;
void main()
{
vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
float derivativeDifference = derivativeElements.x - derivativeElements.y;
float zElement = (derivativeElements.z * 2.0) - 1.0;
// R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)
float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);
gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
// Installs the Shi-Tomasi cornerness shader (via the superclass's designated
// initializer) and applies the default sensitivity.
- (id)init;
{
    if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageShiTomasiCornerDetectionFragmentShaderString]))
    {
        return nil;
    }
    self.sensitivity = 1.5;
    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 508 |
```objective-c
#import "GPUImageDissolveBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Dissolve blend fragment shader (OpenGL ES variant): a straight linear
// crossfade of the two inputs, weighted by mixturePercent via mix().
NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform lowp float mixturePercent;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = mix(textureColor, textureColor2, mixturePercent);
}
);
#else
// Desktop OpenGL variant: identical crossfade without precision qualifiers.
NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform float mixturePercent;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = mix(textureColor, textureColor2, mixturePercent);
}
);
#endif
@implementation GPUImageDissolveBlendFilter

@synthesize mix = _mix;

#pragma mark -
#pragma mark Initialization and teardown

// Sets up the dissolve crossfade shader and starts at an even 50/50 blend.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageDissolveBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
    self.mix = 0.5;
    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the blend fraction and pushes it straight to the shader uniform.
- (void)setMix:(CGFloat)newValue;
{
    _mix = newValue;
    [self setFloat:newValue forUniform:mixUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 403 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input blend filter implementing the linear burn blend mode; the blend
// math lives in the shader in the matching .m file.
@interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 26 |
```objective-c
#import "GPUImageFilterGroup.h"
#import "GPUImageThresholdEdgeDetectionFilter.h"
#import "GPUImageParallelCoordinateLineTransformFilter.h"
#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
#import "GPUImageCannyEdgeDetectionFilter.h"
// This applies a Hough transform to detect lines in a scene. It starts with a thresholded Sobel edge detection pass,
// then takes those edge points in and applies a Hough transform to convert them to lines. The intersection of these lines
// is then determined via blending and accumulation, and a non-maximum suppression filter is applied to find local maxima.
// These local maxima are then converted back into lines in normal space and returned via a callback block.
//
// Rather than using one of the standard Hough transform types, this filter uses parallel coordinate space which is far more efficient
// to rasterize on a GPU.
//
// This approach is based entirely on the PC lines process developed by the Graph@FIT research group at the Brno University of Technology
// and described in their publications:
//
// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
// path_to_url
// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489-1494.
// path_to_url
//#define DEBUGLINEDETECTION
@interface GPUImageHoughTransformLineDetector : GPUImageFilterGroup
{
    // Pipeline stages: edge detection -> parallel-coordinate transform ->
    // thresholded non-maximum suppression.
    GPUImageOutput<GPUImageInput> *thresholdEdgeDetectionFilter;
//    GPUImageThresholdEdgeDetectionFilter *thresholdEdgeDetectionFilter;
    GPUImageParallelCoordinateLineTransformFilter *parallelCoordinateLineTransformFilter;
    GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
    // NOTE(review): these appear to be reusable scratch buffers for line
    // output and framebuffer readback — confirm in the implementation.
    GLfloat *linesArray;
    GLubyte *rawImagePixels;
}
// A threshold value for which a point is detected as belonging to an edge for determining lines. Default is 0.9.
@property(readwrite, nonatomic) CGFloat edgeThreshold;
// A threshold value for which a local maximum is detected as belonging to a line in parallel coordinate space. Default is 0.20.
@property(readwrite, nonatomic) CGFloat lineDetectionThreshold;
// This block is called on the detection of lines, usually on every processed frame. A C array containing normalized slopes and intercepts in m, b pairs (y=mx+b) is passed in, along with a count of the number of lines detected and the current timestamp of the video frame
@property(nonatomic, copy) void(^linesDetectedBlock)(GLfloat* lineArray, NSUInteger linesDetected, CMTime frameTime);
// These images are only enabled when built with DEBUGLINEDETECTION defined, and are used to examine the intermediate states of the Hough transform
@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 634 |
```objective-c
#import "GPUImageMotionDetector.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Motion comparison shader (OpenGL ES variant). Where the current frame's
// color distance from the low-passed frame exceeds 0.2, the fragment emits
// its own (x, y) coordinate with full alpha; elsewhere it emits zero. A
// downstream average then yields the motion centroid and intensity.
// NOTE(review): the "intensity" uniform is declared but never read below.
NSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform highp float intensity;
void main()
{
lowp vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb;
lowp vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
mediump float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735
lowp float movementThreshold = step(0.2, colorDistance);
gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0);
}
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform float intensity;
void main()
{
vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb;
vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735
float movementThreshold = step(0.2, colorDistance);
gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0);
}
);
#endif
@implementation GPUImageMotionDetector
@synthesize lowPassFilterStrength, motionDetectionBlock;
#pragma mark -
#pragma mark Initialization and teardown
// Builds the detection graph: low-pass filter -> motion comparison against
// the current frame -> average color, whose result is reported through
// motionDetectionBlock as a centroid (from r/g sums) and intensity (alpha).
- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    // Start with a low pass filter to define the component to be removed
    lowPassFilter = [[GPUImageLowPassFilter alloc] init];
    [self addFilter:lowPassFilter];
    // Take the difference of the current frame from the low pass filtered result to get the high pass
    frameComparisonFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageMotionComparisonFragmentShaderString];
    [self addFilter:frameComparisonFilter];
    // Texture location 0 needs to be the original image for the difference blend
    [lowPassFilter addTarget:frameComparisonFilter atTextureLocation:1];
    // End with the average color for the scene to determine the centroid
    averageColor = [[GPUImageAverageColor alloc] init];
    // NOTE(review): __unsafe_unretained breaks the retain cycle with the block
    // but is not zeroed on dealloc; __weak would be safer — confirm the
    // deployment target permits it.
    __unsafe_unretained GPUImageMotionDetector *weakSelf = self;
    [averageColor setColorAverageProcessingFinishedBlock:^(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime) {
        if (weakSelf.motionDetectionBlock != NULL)
        {
            // Dividing by alpha normalizes the coordinate sums by the number of
            // moving fragments, recovering the average (centroid) position.
            weakSelf.motionDetectionBlock(CGPointMake(redComponent / alphaComponent, greenComponent / alphaComponent), alphaComponent, frameTime);
        }
        //        NSLog(@"Average X: %f, Y: %f total: %f", redComponent / alphaComponent, greenComponent / alphaComponent, alphaComponent);
    }];
    [frameComparisonFilter addTarget:averageColor];
    self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, frameComparisonFilter, nil];
    self.terminalFilter = frameComparisonFilter;
    self.lowPassFilterStrength = 0.5;
    return self;
}
#pragma mark -
#pragma mark Accessors
// The strength property simply proxies the embedded low-pass filter's value.
- (void)setLowPassFilterStrength:(CGFloat)newValue;
{
    lowPassFilter.filterStrength = newValue;
}
- (CGFloat)lowPassFilterStrength;
{
    return lowPassFilter.filterStrength;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMotionDetector.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 828 |
```objective-c
#import "GPUImageScreenBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Screen blend fragment shader (OpenGL ES variant):
// result = 1 - (1 - base) * (1 - overlay), so the output is never darker
// than either input.
NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
mediump vec4 whiteColor = vec4(1.0);
gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
}
);
#else
// Desktop OpenGL variant: identical math without precision qualifiers.
NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
vec4 whiteColor = vec4(1.0);
gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
}
);
#endif
@implementation GPUImageScreenBlendFilter

// Configures the two-input filter with the screen-mode blend shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageScreenBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 336 |
```objective-c
#import "GPUImageFilter.h"
/** Adjusts the saturation of an image
 */
@interface GPUImageSaturationFilter : GPUImageFilter
{
    GLint saturationUniform; // handle for the shader's saturation uniform
}

/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level
 */
@property(readwrite, nonatomic) CGFloat saturation;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 82 |
```objective-c
#import "GPUImageFilterPipeline.h"
// Private helpers: configuration parsing and (re)wiring of the filter chain.
@interface GPUImageFilterPipeline ()
- (BOOL)_parseConfiguration:(NSDictionary *)configuration;
- (void)_refreshFilters;
@end
@implementation GPUImageFilterPipeline

@synthesize filters = _filters, input = _input, output = _output;

#pragma mark Config file init

// Creates a pipeline from a configuration dictionary (see _parseConfiguration:),
// wiring `input` through the parsed filters to `output`. A parse failure is
// treated as fatal, preserving the original fail-fast behavior.
- (id)initWithConfiguration:(NSDictionary *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
    self = [super init];
    if (self) {
        self.input = input;
        self.output = output;
        if (![self _parseConfiguration:configuration]) {
            NSLog(@"Sorry, a parsing error occurred.");
            abort();
        }
        [self _refreshFilters];
    }
    return self;
}

// Convenience: loads the configuration dictionary from a plist at the given URL.
- (id)initWithConfigurationFile:(NSURL *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
    return [self initWithConfiguration:[NSDictionary dictionaryWithContentsOfURL:configuration] input:input output:output];
}

// Builds the filter chain described by the "Filters" array. Each entry names a
// filter class ("FilterName") and may carry an "Attributes" dictionary mapping
// setter selectors to encoded values of the form "float(x)", "CGPoint(x, y)"
// or "NSString(s)". Returns NO on a missing filter list or an unknown value type.
- (BOOL)_parseConfiguration:(NSDictionary *)configuration {
    NSArray *filters = [configuration objectForKey:@"Filters"];
    if (!filters) {
        return NO;
    }

    NSError *regexError = nil;
    // Group 1 captures the type tag; groups 2 and 3 capture the argument(s).
    // (Fix: the error argument was previously garbled as "®exError".)
    NSRegularExpression *parsingRegex = [NSRegularExpression regularExpressionWithPattern:@"(float|CGPoint|NSString)\\((.*?)(?:,\\s*(.*?))*\\)"
                                                                                  options:0
                                                                                    error:&regexError];

    // It's faster to put them into an array and then pass it to the filters property than it is to call [self addFilter:] every time
    NSMutableArray *orderedFilters = [NSMutableArray arrayWithCapacity:[filters count]];
    for (NSDictionary *filter in filters) {
        NSString *filterName = [filter objectForKey:@"FilterName"];
        Class theClass = NSClassFromString(filterName);
        GPUImageOutput<GPUImageInput> *genericFilter = [[theClass alloc] init];
        // Set up the properties
        NSDictionary *filterAttributes;
        if ((filterAttributes = [filter objectForKey:@"Attributes"])) {
            for (NSString *propertyKey in filterAttributes) {
                // Set up the selector
                SEL theSelector = NSSelectorFromString(propertyKey);
                NSInvocation *inv = [NSInvocation invocationWithMethodSignature:[theClass instanceMethodSignatureForSelector:theSelector]];
                [inv setSelector:theSelector];
                [inv setTarget:genericFilter];
                // Declared at this scope so any NSString argument stays alive
                // until -invoke below, since NSInvocation does not retain its
                // arguments. (Fix: this variable was previously assigned
                // without ever being declared.)
                NSString *stringValue = nil;
                // check selector given with parameter
                if ([propertyKey hasSuffix:@":"]) {
                    // Then parse the arguments
                    NSMutableArray *parsedArray;
                    if ([[filterAttributes objectForKey:propertyKey] isKindOfClass:[NSArray class]]) {
                        NSArray *array = [filterAttributes objectForKey:propertyKey];
                        parsedArray = [NSMutableArray arrayWithCapacity:[array count]];
                        for (NSString *string in array) {
                            NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string
                                                                                   options:0
                                                                                     range:NSMakeRange(0, [string length])];
                            NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];
                            if ([modifier isEqualToString:@"float"]) {
                                // Float modifier, one argument
                                CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                                [parsedArray addObject:[NSNumber numberWithFloat:value]];
                                [inv setArgument:&value atIndex:2];
                            } else if ([modifier isEqualToString:@"CGPoint"]) {
                                // CGPoint modifier, two float arguments
                                CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                                CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];
                                CGPoint value = CGPointMake(x, y);
                                [parsedArray addObject:[NSValue valueWithCGPoint:value]];
                            } else if ([modifier isEqualToString:@"NSString"]) {
                                // NSString modifier, one string argument
                                stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];
                                [inv setArgument:&stringValue atIndex:2];
                            } else {
                                return NO;
                            }
                        }
                        // The collected array is what is actually passed to the setter.
                        [inv setArgument:&parsedArray atIndex:2];
                    } else {
                        NSString *string = [filterAttributes objectForKey:propertyKey];
                        NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string
                                                                               options:0
                                                                                 range:NSMakeRange(0, [string length])];
                        NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];
                        if ([modifier isEqualToString:@"float"]) {
                            // Float modifier, one argument
                            CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                            [inv setArgument:&value atIndex:2];
                        } else if ([modifier isEqualToString:@"CGPoint"]) {
                            // CGPoint modifier, two float arguments
                            CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                            CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];
                            CGPoint value = CGPointMake(x, y);
                            [inv setArgument:&value atIndex:2];
                        } else if ([modifier isEqualToString:@"NSString"]) {
                            // NSString modifier, one string argument
                            stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];
                            [inv setArgument:&stringValue atIndex:2];
                        } else {
                            return NO;
                        }
                    }
                }

                [inv invoke];
            }
        }
        [orderedFilters addObject:genericFilter];
    }
    self.filters = orderedFilters;

    return YES;
}

#pragma mark Regular init

// Creates a pipeline from an already-constructed, ordered list of filters.
- (id)initWithOrderedFilters:(NSArray *)filters input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
    self = [super init];
    if (self) {
        self.input = input;
        self.output = output;
        self.filters = [NSMutableArray arrayWithArray:filters];
        [self _refreshFilters];
    }
    return self;
}

- (void)addFilter:(GPUImageOutput<GPUImageInput> *)filter atIndex:(NSUInteger)insertIndex {
    [self.filters insertObject:filter atIndex:insertIndex];
    [self _refreshFilters];
}

- (void)addFilter:(GPUImageOutput<GPUImageInput> *)filter {
    [self.filters addObject:filter];
    [self _refreshFilters];
}

- (void)replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput<GPUImageInput> *)filter {
    [self.filters replaceObjectAtIndex:index withObject:filter];
    [self _refreshFilters];
}

- (void)removeFilter:(GPUImageOutput<GPUImageInput> *)filter;
{
    [self.filters removeObject:filter];
    [self _refreshFilters];
}

- (void)removeFilterAtIndex:(NSUInteger)index {
    [self.filters removeObjectAtIndex:index];
    [self _refreshFilters];
}

- (void)removeAllFilters {
    [self.filters removeAllObjects];
    [self _refreshFilters];
}

- (void)replaceAllFilters:(NSArray *)newFilters {
    self.filters = [NSMutableArray arrayWithArray:newFilters];
    [self _refreshFilters];
}

// Rebuilds the target links so frames flow input -> f1 -> ... -> fn -> output.
// Each node's existing targets are cleared before the next link is added.
- (void)_refreshFilters {
    id prevFilter = self.input;
    for (GPUImageOutput<GPUImageInput> *theFilter in self.filters) {
        [prevFilter removeAllTargets];
        [prevFilter addTarget:theFilter];
        prevFilter = theFilter;
    }
    [prevFilter removeAllTargets];
    if (self.output != nil) {
        [prevFilter addTarget:self.output];
    }
}

// Convenience accessors reading the final filter's current framebuffer.
- (UIImage *)currentFilteredFrame {
    return [(GPUImageOutput<GPUImageInput> *)[_filters lastObject] imageFromCurrentFramebuffer];
}

- (UIImage *)currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation {
    return [(GPUImageOutput<GPUImageInput> *)[_filters lastObject] imageFromCurrentFramebufferWithOrientation:imageOrientation];
}

- (CGImageRef)newCGImageFromCurrentFilteredFrame {
    return [(GPUImageOutput<GPUImageInput> *)[_filters lastObject] newCGImageFromCurrentlyProcessedOutput];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,687 |
```objective-c
#import "GPUImageXYDerivativeFilter.h"
// I'm using the Prewitt operator to obtain the derivative, then squaring the X and Y components and placing the product of the two in Z.
// In tests, Prewitt seemed to be tied with Sobel for the best, and it's just a little cheaper to compute.
// This is primarily intended to be used with corner detection filters.
@implementation GPUImageXYDerivativeFilter
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Prewitt X/Y derivative shader (OpenGL ES variant). Output packing:
// r = Ix^2, g = Iy^2, b = (Ix*Iy + 1) / 2 so the signed product survives the
// [0, 1] color range; downstream corner detectors unpack it.
NSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform float edgeStrength;
void main()
{
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
// Prewitt kernels: uniform +/-1 weights along each 3-pixel row/column.
float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
verticalDerivative = verticalDerivative * edgeStrength;
horizontalDerivative = horizontalDerivative * edgeStrength;
// Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. This will be expanded in the corner detection filter
gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0);
}
);
#else
// Desktop OpenGL variant: identical computation without the precision statement.
NSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform float edgeStrength;
void main()
{
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
verticalDerivative = verticalDerivative * edgeStrength;
horizontalDerivative = horizontalDerivative * edgeStrength;
// Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. This will be expanded in the corner detection filter
gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
// Installs the gradient shader (via the superclass designated initializer)
// with a default edge strength of 1.0.
- (id)init;
{
    if (!(self = [self initWithFragmentShaderFromString:kGPUImageGradientFragmentShaderString]))
    {
        return nil;
    }
    self.edgeStrength = 1.0;
    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,016 |
```objective-c
#import "GPUImageLuminanceRangeFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Luminance range reduction shader (OpenGL ES variant). Each pixel's RGB is
// shifted toward mid-gray in proportion to how far its luminance is from 0.5,
// scaled by rangeReduction; alpha is passed through unchanged.
NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float rangeReduction;
// Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
mediump float luminance = dot(textureColor.rgb, luminanceWeighting);
mediump float luminanceRatio = ((0.5 - luminance) * rangeReduction);
gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);
}
);
#else
// Desktop OpenGL variant: identical math without precision qualifiers.
NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float rangeReduction;
// Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, luminanceWeighting);
float luminanceRatio = ((0.5 - luminance) * rangeReduction);
gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);
}
);
#endif
@implementation GPUImageLuminanceRangeFilter

@synthesize rangeReductionFactor = _rangeReductionFactor;

#pragma mark -
#pragma mark Initialization and teardown

/// Sets up the filter with the luminance-range shader and a default
/// range reduction factor of 0.6.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLuminanceRangeFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    
    rangeReductionUniform = [filterProgram uniformIndex:@"rangeReduction"];
    self.rangeReductionFactor = 0.6;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new factor and pushes it through to the shader uniform.
- (void)setRangeReductionFactor:(CGFloat)newValue;
{
    _rangeReductionFactor = newValue;
    
    [self setFloat:_rangeReductionFactor forUniform:rangeReductionUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 534 |
```objective-c
#import "GPUImageTwoPassFilter.h"
/// Base class for separable two-pass filters (a vertical pass followed by a
/// horizontal pass) that sample neighboring texels, such as the Gaussian blur.
@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
{
    // Uniform locations for the texel step sizes in each pass's shader program
    GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
    // Current texel step sizes, as uploaded to the shaders
    GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
    CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
}

// This sets the spacing between texels (in pixels) when sampling for the first (vertical) and second (horizontal) passes. By default, this is 1.0
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 152 |
```objective-c
#import "GPUImageTextureOutput.h"
@implementation GPUImageTextureOutput

@synthesize delegate = _delegate;
@synthesize texture = _texture;
@synthesize enabled;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    self.enabled = YES;
    
    return self;
}

// Callers must invoke this once they are finished reading from the exposed
// texture, so the framebuffer backing it can be unlocked and recycled.
- (void)doneWithTexture;
{
    [firstInputFramebuffer unlock];
}

#pragma mark -
#pragma mark GPUImageInput protocol

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    [_delegate newFrameReadyFromTextureOutput:self];
}

- (NSInteger)nextAvailableTextureIndex;
{
    return 0;
}

// TODO: Deal with the fact that the texture changes regularly as a result of the caching
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    firstInputFramebuffer = newInputFramebuffer;
    // Keep the framebuffer alive until -doneWithTexture is called
    [firstInputFramebuffer lock];
    
    _texture = [firstInputFramebuffer texture];
}

// Rotation and size hints are ignored; this output exposes the texture as-is
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}

- (CGSize)maximumOutputSize;
{
    return CGSizeZero;
}

- (void)endProcessing
{
}

- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
    return NO;
}

- (BOOL)wantsMonochromeInput;
{
    return NO;
}

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTextureOutput.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 339 |
```objective-c
#import "GPUImageSoftEleganceFilter.h"
#import "GPUImagePicture.h"
#import "GPUImageLookupFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageAlphaBlendFilter.h"
@implementation GPUImageSoftEleganceFilter

// Filter chain: lookup 1 -> Gaussian blur -> alpha blend (sharp + blurred) -> lookup 2
- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    UIImage *image1 = [UIImage imageNamed:@"lookup_soft_elegance_1.png"];
    UIImage *image2 = [UIImage imageNamed:@"lookup_soft_elegance_2.png"];
#else
    NSImage *image1 = [NSImage imageNamed:@"lookup_soft_elegance_1.png"];
    NSImage *image2 = [NSImage imageNamed:@"lookup_soft_elegance_2.png"];
#endif
    
    NSAssert(image1 && image2,
             @"To use GPUImageSoftEleganceFilter you need to add lookup_soft_elegance_1.png and lookup_soft_elegance_2.png from GPUImage/framework/Resources to your application bundle.");
    
    // First color remap via lookup table 1 (lookup image feeds texture slot 1)
    lookupImageSource1 = [[GPUImagePicture alloc] initWithImage:image1];
    GPUImageLookupFilter *lookupFilter1 = [[GPUImageLookupFilter alloc] init];
    [self addFilter:lookupFilter1];
    
    [lookupImageSource1 addTarget:lookupFilter1 atTextureLocation:1];
    [lookupImageSource1 processImage];
    
    // Soft-focus effect: blend a blurred copy back over the sharp image
    GPUImageGaussianBlurFilter *gaussianBlur = [[GPUImageGaussianBlurFilter alloc] init];
    gaussianBlur.blurRadiusInPixels = 10.0;
    [lookupFilter1 addTarget:gaussianBlur];
    [self addFilter:gaussianBlur];
    
    GPUImageAlphaBlendFilter *alphaBlend = [[GPUImageAlphaBlendFilter alloc] init];
    alphaBlend.mix = 0.14;
    [lookupFilter1 addTarget:alphaBlend];
    [gaussianBlur addTarget:alphaBlend];
    [self addFilter:alphaBlend];
    
    // Second color remap via lookup table 2
    lookupImageSource2 = [[GPUImagePicture alloc] initWithImage:image2];
    GPUImageLookupFilter *lookupFilter2 = [[GPUImageLookupFilter alloc] init];
    [alphaBlend addTarget:lookupFilter2];
    // NOTE(review): unlike lookupFilter1, no explicit atTextureLocation: is
    // given here — verify the lookup image lands in texture slot 1 as intended
    [lookupImageSource2 addTarget:lookupFilter2];
    [lookupImageSource2 processImage];
    [self addFilter:lookupFilter2];
    
    self.initialFilters = [NSArray arrayWithObjects:lookupFilter1, nil];
    self.terminalFilter = lookupFilter2;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 536 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/// Two-input base class for filters that sample a cross pattern of neighboring
/// texels around each pixel.
@interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter
{
    GLint texelWidthUniform, texelHeightUniform;
    
    CGFloat texelWidth, texelHeight;
    // Presumably set when a caller assigns texelWidth/texelHeight explicitly,
    // so automatic sizing doesn't overwrite it — TODO confirm against the .m
    BOOL hasOverriddenImageSizeFactor;
}

// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 123 |
```objective-c
#import "GPUImageFilterGroup.h"
#import "GPUImageLowPassFilter.h"
#import "GPUImageAverageColor.h"
/// Detects motion by comparing each incoming frame against a low-pass-filtered
/// history of previous frames, reporting a motion centroid and intensity.
@interface GPUImageMotionDetector : GPUImageFilterGroup
{
    GPUImageLowPassFilter *lowPassFilter;
    GPUImageTwoInputFilter *frameComparisonFilter;
    GPUImageAverageColor *averageColor;
}

// This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5.
@property(readwrite, nonatomic) CGFloat lowPassFilterStrength;

// For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity.
@property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime);

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMotionDetector.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 175 |
```objective-c
#import "GPUImageTextureInput.h"
@implementation GPUImageTextureInput

#pragma mark -
#pragma mark Initialization and teardown

// Wraps an externally-created OpenGL texture so it can feed a GPUImage chain.
// newTextureSize should match the texture's actual dimensions.
- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    // Make the shared image-processing GL context current before touching GL state
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
    });
    
    textureSize = newTextureSize;
    
    // Wrap (don't copy) the caller's texture in a framebuffer object
    runSynchronouslyOnVideoProcessingQueue(^{
        outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:newTextureSize overriddenTexture:newInputTexture];
    });
    
    return self;
}

#pragma mark -
#pragma mark Image rendering

// Pushes the wrapped texture to all targets, tagged with the given frame time.
- (void)processTextureWithFrameTime:(CMTime)frameTime;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            [currentTarget setInputSize:textureSize atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
            [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex];
        }
    });
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTextureInput.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 263 |
```objective-c
#import "GPUImageParallelCoordinateLineTransformFilter.h"
// Pass-through vertex shader; line endpoints are supplied directly in
// normalized device coordinates, so no transform is applied.
NSString *const kGPUImageHoughAccumulationVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 
 void main()
 {
     gl_Position = position;
 }
);

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Each rasterized line fragment deposits a small constant vote (1/256 per
// channel); accumulation happens via additive blending set up in the filter
NSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING
(
 const lowp float scalingFactor = 1.0 / 256.0;
 
 void main()
 {
     gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);
 }
);

// highp - 16-bit, floating point range: -2^62 to 2^62, integer range: -2^16 to 2^16
// NOTE: See below for where I'm tacking on the required extension as a prefix
NSString *const kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING
(
 const lowp float scalingFactor = 0.004;
// const lowp float scalingFactor = 0.1;
 
 void main()
 {
     // Read back the previously written color (EXT_shader_framebuffer_fetch)
     // and carry overflow from one channel into the next for extra vote range
     mediump vec4 fragmentData = gl_LastFragData[0];
     
     fragmentData.r = fragmentData.r + scalingFactor;
     fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g;
     fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b;
     fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a;
     
     fragmentData = fract(fragmentData);
     
     gl_FragColor = vec4(fragmentData.rgb, 1.0);
 }
);
#else
NSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING
(
 const float scalingFactor = 1.0 / 256.0;
 
 void main()
 {
     gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);
 }
);

// Desktop GL build: no framebuffer-fetch extension here, so this variant is
// identical to the plain accumulation shader above
NSString *const kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING
(
 const float scalingFactor = 1.0 / 256.0;
 
 void main()
 {
//     gl_FragColor = vec4(scalingFactor, scalingFactor, scalingFactor, 1.0);
     gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);
 }
);
#endif
@interface GPUImageParallelCoordinateLineTransformFilter()

// Rendering
// Lazily allocates the CPU-side buffers used to build line vertex data each frame
- (void)generateLineCoordinates;

@end
@implementation GPUImageParallelCoordinateLineTransformFilter

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    NSString *fragmentShaderToUse = nil;
    
    // When EXT_shader_framebuffer_fetch is available, votes are accumulated by
    // reading back the previous fragment color; otherwise the plain shader is
    // used together with additive blending (see renderToTextureWithVertices:)
    if ([GPUImageContext deviceSupportsFramebufferReads])
    {
        fragmentShaderToUse = [NSString stringWithFormat:@"#extension GL_EXT_shader_framebuffer_fetch : require\n %@",kGPUImageHoughAccumulationFBOReadFragmentShaderString];
    }
    else
    {
        fragmentShaderToUse = kGPUImageHoughAccumulationFragmentShaderString;
    }
    
    if (!(self = [super initWithVertexShaderFromString:kGPUImageHoughAccumulationVertexShaderString fragmentShaderFromString:fragmentShaderToUse]))
    {
        return nil;
    }
    
    return self;
}

// TODO: have this be regenerated on change of image size
- (void)dealloc;
{
    // free(NULL) is a no-op, so this is safe even if the buffers were never allocated
    free(rawImagePixels);
    free(lineCoordinates);
}

- (void)initializeAttributes;
{
    // Only a position attribute: this filter generates its own line geometry,
    // so no texture coordinate attribute is needed
    [filterProgram addAttribute:@"position"];
}

#pragma mark -
#pragma mark Rendering

#define MAXLINESCALINGFACTOR 4

- (void)generateLineCoordinates;
{
    // Readback buffer for the RGBA edge image
    unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4;
    rawImagePixels = (GLubyte *)malloc(imageByteSize);
    
    // Each edge pixel yields a pair of line segments (8 floats); size the
    // buffer assuming at most 1 in MAXLINESCALINGFACTOR pixels is an edge
    maxLinePairsToRender = (inputTextureSize.width * inputTextureSize.height) / MAXLINESCALINGFACTOR;
    lineCoordinates = calloc(maxLinePairsToRender * 8, sizeof(GLfloat));
}

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // Lazily allocate the CPU-side buffers on the first frame
    if (lineCoordinates == NULL)
    {
        [self generateLineCoordinates];
    }
    
    [self renderToTextureWithVertices:NULL textureCoordinates:NULL];
    
    [self informTargetsAboutNewFrameAtTime:frameTime];
}

- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    // we need a normal color texture for this filter
    NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
    NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
    
    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }
    
    // Grab the edge points from the previous frame and create the parallel coordinate lines for them
    // This would be a great place to have a working histogram pyramid implementation
    [GPUImageContext useImageProcessingContext];
    [firstInputFramebuffer activateFramebuffer];
    
    // Force the GPU to finish the edge pass before reading pixels back
    glFinish();
    glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
    
    CGFloat xAspectMultiplier = 1.0, yAspectMultiplier = 1.0;
    
//    if (inputTextureSize.width > inputTextureSize.height)
//    {
//        yAspectMultiplier = inputTextureSize.height / inputTextureSize.width;
//    }
//    else
//    {
//        xAspectMultiplier = inputTextureSize.width / inputTextureSize.height;
//    }
    
//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    
    unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4;
    unsigned int imageWidth = inputTextureSize.width * 4;
    
    linePairsToRender = 0;
    unsigned int currentByte = 0;
    unsigned int lineStorageIndex = 0;
    unsigned int maxLineStorageIndex = maxLinePairsToRender * 8 - 8;
    
    GLfloat minY = 100, maxY = -100, minX = 100, maxX = -100;
    // Scan the red channel, stepping 8 bytes (two RGBA pixels) at a time
    while (currentByte < imageByteSize)
    {
        GLubyte colorByte = rawImagePixels[currentByte];
        
        if (colorByte > 0)
        {
            // currentByte is a byte offset; xCoordinate is therefore in bytes
            // and is divided by 4 below to convert back to pixels
            unsigned int xCoordinate = currentByte % imageWidth;
            unsigned int yCoordinate = currentByte / imageWidth;
            
            CGFloat normalizedXCoordinate = (-1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / inputTextureSize.width) * xAspectMultiplier;
            CGFloat normalizedYCoordinate = (-1.0 + 2.0 * (CGFloat)(yCoordinate) / inputTextureSize.height) * yAspectMultiplier;
            
            minY = MIN(minY, normalizedYCoordinate);
            maxY = MAX(maxY, normalizedYCoordinate);
            minX = MIN(minX, normalizedXCoordinate);
            maxX = MAX(maxX, normalizedXCoordinate);
            
//            NSLog(@"Parallel line coordinates: (%f, %f) - (%f, %f) - (%f, %f)", -1.0, -normalizedYCoordinate, 0.0, normalizedXCoordinate, 1.0, normalizedYCoordinate);
            // T space coordinates, (-d, -y) to (0, x)
            lineCoordinates[lineStorageIndex++] = -1.0;
            lineCoordinates[lineStorageIndex++] = -normalizedYCoordinate;
            lineCoordinates[lineStorageIndex++] = 0.0;
            lineCoordinates[lineStorageIndex++] = normalizedXCoordinate;
            
            // S space coordinates, (0, x) to (d, y)
            lineCoordinates[lineStorageIndex++] = 0.0;
            lineCoordinates[lineStorageIndex++] = normalizedXCoordinate;
            lineCoordinates[lineStorageIndex++] = 1.0;
            lineCoordinates[lineStorageIndex++] = normalizedYCoordinate;
            
            linePairsToRender++;
            
            // Clamp so the next iteration cannot write past the end of lineCoordinates
            linePairsToRender = MIN(linePairsToRender, maxLinePairsToRender);
            lineStorageIndex = MIN(lineStorageIndex, maxLineStorageIndex);
        }
        currentByte +=8;
    }
    
//    NSLog(@"Line pairs to render: %d out of max: %d", linePairsToRender, maxLinePairsToRender);
    
//    CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
//    NSLog(@"Line generation processing time : %f ms", 1000.0 * currentFrameTime);
    
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    
    if (usingNextFrameForImageCapture)
    {
        [outputFramebuffer lock];
    }
    
    [GPUImageContext setActiveShaderProgram:filterProgram];
    
    [self setUniformsForProgramAtIndex:0];
    
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    
    if (![GPUImageContext deviceSupportsFramebufferReads])
    {
        // Without framebuffer fetch, votes accumulate via additive blending
        glBlendEquation(GL_FUNC_ADD);
        glBlendFunc(GL_ONE, GL_ONE);
        glEnable(GL_BLEND);
    }
    else
    {
    }
    
    glLineWidth(1);
    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates);
    // 2 vertices per line, 2 lines per pair -> 4 vertices per pair
    glDrawArrays(GL_LINES, 0, (linePairsToRender * 4));
    
    if (![GPUImageContext deviceSupportsFramebufferReads])
    {
        glDisable(GL_BLEND);
    }
    
    [firstInputFramebuffer unlock];
    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,092 |
```objective-c
#import "GPUImageLineGenerator.h"
// Pass-through vertex shader; line endpoints arrive already in normalized
// device coordinates
NSString *const kGPUImageLineGeneratorVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 
 void main()
 {
     gl_Position = position;
 }
);

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Flat-color fragment shader; lineColor is set via -setLineColorRed:green:blue:
NSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING
(
 uniform lowp vec3 lineColor;
 
 void main()
 {
     gl_FragColor = vec4(lineColor, 1.0);
 }
);
#else
// Desktop GL variant without the ES precision qualifier
NSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING
(
 uniform vec3 lineColor;
 
 void main()
 {
     gl_FragColor = vec4(lineColor, 1.0);
 }
);
#endif
@interface GPUImageLineGenerator()

// Allocates the reusable vertex buffer that line endpoints are written into
- (void)generateLineCoordinates;

@end
@implementation GPUImageLineGenerator

@synthesize lineWidth = _lineWidth;

// Maximum number of lines the vertex buffer allocated in
// -generateLineCoordinates can hold (4 floats — two endpoints — per line).
#define kGPUImageLineGeneratorMaxLineCount 1024

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithVertexShaderFromString:kGPUImageLineGeneratorVertexShaderString fragmentShaderFromString:kGPUImageLineGeneratorFragmentShaderString]))
    {
        return nil;
    }
    
    runSynchronouslyOnVideoProcessingQueue(^{
        lineWidthUniform = [filterProgram uniformIndex:@"lineWidth"];
        lineColorUniform = [filterProgram uniformIndex:@"lineColor"];
        
        // Defaults: 1 px wide, green lines
        self.lineWidth = 1.0;
        [self setLineColorRed:0.0 green:1.0 blue:0.0];
    });
    
    return self;
}

- (void)dealloc
{
    if (lineCoordinates)
    {
        free(lineCoordinates);
    }
}

#pragma mark -
#pragma mark Rendering

- (void)generateLineCoordinates;
{
    lineCoordinates = calloc(kGPUImageLineGeneratorMaxLineCount * 4, sizeof(GLfloat));
}

// Draws the supplied lines (slope/intercept pairs in normalized device
// coordinates) additively into the output framebuffer. A slope greater than
// 9000.0 is a sentinel for a vertical line, with the intercept giving its x
// position. At most kGPUImageLineGeneratorMaxLineCount lines are rendered;
// excess entries are ignored. (Fix: previously an unbounded count could write
// past the end of the 1024-line vertex buffer — a heap overflow.)
- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;
{
    if (self.preventRendering)
    {
        return;
    }
    
    if (lineCoordinates == NULL)
    {
        [self generateLineCoordinates];
    }
    
    // Clamp to the capacity of the buffer allocated above
    NSUInteger linesToRender = MIN(numberOfLines, (NSUInteger)kGPUImageLineGeneratorMaxLineCount);
    
    // Iterate through and generate vertices from the slopes and intercepts
    NSUInteger currentVertexIndex = 0;
    NSUInteger currentLineIndex = 0;
    NSUInteger maxLineIndex = linesToRender * 2;
    while(currentLineIndex < maxLineIndex)
    {
        GLfloat slope = lineSlopeAndIntercepts[currentLineIndex++];
        GLfloat intercept = lineSlopeAndIntercepts[currentLineIndex++];
        
        if (slope > 9000.0) // Vertical line
        {
            lineCoordinates[currentVertexIndex++] = intercept;
            lineCoordinates[currentVertexIndex++] = -1.0;
            lineCoordinates[currentVertexIndex++] = intercept;
            lineCoordinates[currentVertexIndex++] = 1.0;
        }
        else
        {
            // Endpoints at the left (x = -1) and right (x = 1) edges
            lineCoordinates[currentVertexIndex++] = -1.0;
            lineCoordinates[currentVertexIndex++] = slope * -1.0 + intercept;
            lineCoordinates[currentVertexIndex++] = 1.0;
            lineCoordinates[currentVertexIndex++] = slope * 1.0 + intercept;
        }
    }
    
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:filterProgram];
        
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
        [outputFramebuffer activateFramebuffer];
        
        glClearColor(0.0, 0.0, 0.0, 0.0);
        glClear(GL_COLOR_BUFFER_BIT);
        
        // Additive blending so overlapping lines accumulate brightness
        glBlendEquation(GL_FUNC_ADD);
        glBlendFunc(GL_ONE, GL_ONE);
        glEnable(GL_BLEND);
        
        glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates);
        glDrawArrays(GL_LINES, 0, ((unsigned int)linesToRender * 2));
        
        glDisable(GL_BLEND);
        
        [self informTargetsAboutNewFrameAtTime:frameTime];
    });
}

- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    // Prevent rendering of the frame by normal means
}

#pragma mark -
#pragma mark Accessors

- (void)setLineWidth:(CGFloat)newValue;
{
    _lineWidth = newValue;
    [GPUImageContext setActiveShaderProgram:filterProgram];
    glLineWidth(newValue);
}

- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
{
    GPUVector3 lineColor = {redComponent, greenComponent, blueComponent};
    
    [self setVec3:lineColor forUniform:lineColorUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLineGenerator.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 983 |
```objective-c
#import "GPUImageFilter.h"
/// Creates a bulge distortion on the image
/// Creates a bulge distortion on the image
@interface GPUImageBulgeDistortionFilter : GPUImageFilter
{
    // Cached uniform locations for the distortion parameters in the fragment shader
    GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
}

/// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
@property(readwrite, nonatomic) CGFloat radius;
/// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
@property(readwrite, nonatomic) CGFloat scale;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 158 |
```objective-c
#import <Foundation/Foundation.h>
#import "GPUImageContext.h"
// A single RGBA pixel read back from the GPU, one byte per channel
struct GPUByteColorVector {
    GLubyte red;
    GLubyte green;
    GLubyte blue;
    GLubyte alpha;
};
typedef struct GPUByteColorVector GPUByteColorVector;

@protocol GPUImageRawDataProcessor;

// NOTE(review): the iOS and Mac interface declarations below are currently
// identical; the #if split presumably exists for future platform-specific ivars
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
@interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
    CGSize imageSize;
    GPUImageRotationMode inputRotation;
    
    BOOL outputBGRA;
}
#else
@interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
    CGSize imageSize;
    GPUImageRotationMode inputRotation;
    
    BOOL outputBGRA;
}
#endif

// Raw pixel bytes of the most recent frame; bracket access with
// -lockFramebufferForReading / -unlockFramebufferAfterReading
@property(readonly) GLubyte *rawBytesForImage;

// Invoked whenever a new frame is delivered to this output
@property(nonatomic, copy) void(^newFrameAvailableBlock)(void);
@property(nonatomic) BOOL enabled;

// Initialization and teardown
- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;

// Data access
- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
// Row stride in bytes of rawBytesForImage — may include padding; see the implementation
- (NSUInteger)bytesPerRowInOutput;

- (void)setImageSize:(CGSize)newImageSize;

- (void)lockFramebufferForReading;
- (void)unlockFramebufferAfterReading;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 265 |
```objective-c
#import "GPUImageExposureFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Multiplies RGB by 2^exposure, i.e. exposure is measured in photographic stops
NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform highp float exposure;
 
 void main()
 {
     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
 }
);
#else
// Desktop GL variant: identical math, without the ES precision qualifiers
NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float exposure;
 
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     
     gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
 }
);
#endif
@implementation GPUImageExposureFilter

@synthesize exposure = _exposure;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter with the exposure shader; 0.0 (no change) is the default.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageExposureFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    
    exposureUniform = [filterProgram uniformIndex:@"exposure"];
    self.exposure = 0.0;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new exposure (in stops) and forwards it to the shader uniform.
- (void)setExposure:(CGFloat)newValue;
{
    _exposure = newValue;
    
    [self setFloat:_exposure forUniform:exposureUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageExposureFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 337 |
```objective-c
#import "GPUImageHardLightBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Per-channel hard-light blend of the overlay (second input) onto the base
// (first input), including the standard alpha-compositing terms for partially
// transparent pixels.
NSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 // NOTE(review): W is declared but not referenced in this shader
 const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
 
 void main()
 {
     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
     
     // For each channel: multiply when the overlay is dark, screen when it is light
     highp float ra;
     if (2.0 * overlay.r < overlay.a) {
         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     } else {
         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     }
     
     highp float ga;
     if (2.0 * overlay.g < overlay.a) {
         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     } else {
         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     }
     
     highp float ba;
     if (2.0 * overlay.b < overlay.a) {
         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     } else {
         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     }
     
     gl_FragColor = vec4(ra, ga, ba, 1.0);
 }
);
#else
// Desktop GL variant: identical math, without the ES precision qualifiers
NSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 
 // NOTE(review): W is declared but not referenced in this shader
 const vec3 W = vec3(0.2125, 0.7154, 0.0721);
 
 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
     
     float ra;
     if (2.0 * overlay.r < overlay.a) {
         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     } else {
         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     }
     
     float ga;
     if (2.0 * overlay.g < overlay.a) {
         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     } else {
         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     }
     
     float ba;
     if (2.0 * overlay.b < overlay.a) {
         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     } else {
         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     }
     
     gl_FragColor = vec4(ra, ga, ba, 1.0);
 }
);
#endif
@implementation GPUImageHardLightBlendFilter

/// Initializes the two-input blend filter with the hard-light fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageHardLightBlendFragmentShaderString];
    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,048 |
```objective-c
#import "GPUImageAmatorkaFilter.h"
#import "GPUImagePicture.h"
#import "GPUImageLookupFilter.h"
@implementation GPUImageAmatorkaFilter

// Single-step chain: the bundled Amatorka lookup table drives a lookup filter
- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    UIImage *image = [UIImage imageNamed:@"lookup_amatorka.png"];
#else
    NSImage *image = [NSImage imageNamed:@"lookup_amatorka.png"];
#endif
    
    NSAssert(image, @"To use GPUImageAmatorkaFilter you need to add lookup_amatorka.png from GPUImage/framework/Resources to your application bundle.");
    
    // Stored in an ivar so the lookup source stays alive for the filter's lifetime
    lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
    GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
    [self addFilter:lookupFilter];
    
    // The lookup table feeds the second texture slot of the lookup filter
    [lookupImageSource addTarget:lookupFilter atTextureLocation:1];
    [lookupImageSource processImage];
    
    self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];
    self.terminalFilter = lookupFilter;
    
    return self;
}

#pragma mark -
#pragma mark Accessors

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 251 |
```objective-c
#import "GPUImageThresholdEdgeDetectionFilter.h"
/// Sketch variant of the threshold edge detector. Adds no new API over its
/// superclass — see the implementation file for the behavior it customizes.
@interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 27 |
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
/// Sobel edge detector built on the 3x3 sampling base class. Adds no new API
/// over its superclass — see the implementation file for the shader it installs.
@interface GPUImageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 35 |
```objective-c
#import "GPUImageFilter.h"
/// Selectively replaces a target color in the incoming image. The color to
/// match is set with -setColorToReplaceRed:green:blue:; the match tolerance
/// and edge falloff are controlled by the properties below.
@interface GPUImageChromaKeyFilter : GPUImageFilter
{
    // Cached uniform locations for the shader parameters
    GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
}

/** The threshold sensitivity controls how similar pixels need to be colored to be replaced
 
 The default value is 0.3
 */
@property(readwrite, nonatomic) CGFloat thresholdSensitivity;

/** The degree of smoothing controls how gradually similar colors are replaced in the image
 
 The default value is 0.1
 */
@property(readwrite, nonatomic) CGFloat smoothing;

/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
 
 The default is green: (0.0, 1.0, 0.0).
 
 @param redComponent Red component of color to be replaced
 @param greenComponent Green component of color to be replaced
 @param blueComponent Blue component of color to be replaced
 */
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 221 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/// Blends the second input over the first with a variable mix factor.
@interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
{
    // Cached uniform location for the mix factor in the fragment shader
    GLint mixUniform;
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat mix;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 77 |
```objective-c
//
// GPUImageCGAColorspaceFilter.m
//
#import "GPUImageCGAColorspaceFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Pixelates the image on a 200x320 grid, then snaps each sample to the nearest
// color of the classic CGA palette 1 (black, white, cyan, magenta).
NSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 void main()
 {
     // Snap the sample position to a 200x320 grid for the blocky look
     highp vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);
     //highp vec4 colorDivisor = vec4(colorDepth);
     
     highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor);
     highp vec4 color = texture2D(inputImageTexture, samplePos );
     
     //gl_FragColor = texture2D(inputImageTexture, samplePos );
     
     mediump vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);
     mediump vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);
     mediump vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);
     mediump vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);
     
     // NOTE(review): endColor is declared but never used
     mediump vec4 endColor;
     highp float blackDistance = distance(color, colorBlack);
     highp float whiteDistance = distance(color, colorWhite);
     highp float magentaDistance = distance(color, colorMagenta);
     highp float cyanDistance = distance(color, colorCyan);
     
     mediump vec4 finalColor;
     
     // Pick the nearest of the four palette colors in RGBA space
     highp float colorDistance = min(magentaDistance, cyanDistance);
     colorDistance = min(colorDistance, whiteDistance);
     colorDistance = min(colorDistance, blackDistance);
     
     if (colorDistance == blackDistance) {
         finalColor = colorBlack;
     } else if (colorDistance == whiteDistance) {
         finalColor = colorWhite;
     } else if (colorDistance == cyanDistance) {
         finalColor = colorCyan;
     } else {
         finalColor = colorMagenta;
     }
     
     gl_FragColor = finalColor;
 }
);
#else
NSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);
//highp vec4 colorDivisor = vec4(colorDepth);
vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor);
vec4 color = texture2D(inputImageTexture, samplePos );
//gl_FragColor = texture2D(inputImageTexture, samplePos );
vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);
vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);
vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);
vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);
vec4 endColor;
float blackDistance = distance(color, colorBlack);
float whiteDistance = distance(color, colorWhite);
float magentaDistance = distance(color, colorMagenta);
float cyanDistance = distance(color, colorCyan);
vec4 finalColor;
float colorDistance = min(magentaDistance, cyanDistance);
colorDistance = min(colorDistance, whiteDistance);
colorDistance = min(colorDistance, blackDistance);
if (colorDistance == blackDistance) {
finalColor = colorBlack;
} else if (colorDistance == whiteDistance) {
finalColor = colorWhite;
} else if (colorDistance == cyanDistance) {
finalColor = colorCyan;
} else {
finalColor = colorMagenta;
}
gl_FragColor = finalColor;
}
);
#endif
@implementation GPUImageCGAColorspaceFilter

#pragma mark -
#pragma mark Initialization and teardown

/// Initializes the filter with the CGA colorspace fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageCGAColorspaceFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,001 |
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageOpacityFilter : GPUImageFilter
{
GLint opacityUniform;
}
// Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting
@property(readwrite, nonatomic) CGFloat opacity;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 61 |
```objective-c
#import "GPUImageSepiaFilter.h"
@implementation GPUImageSepiaFilter
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
self.intensity = 1.0;
self.colorMatrix = (GPUMatrix4x4){
{0.3588, 0.7044, 0.1368, 0.0},
{0.2990, 0.5870, 0.1140, 0.0},
{0.2392, 0.4696, 0.0912 ,0.0},
{0,0,0,1.0},
};
return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 156 |
```objective-c
#import "GPUImageLowPassFilter.h"
@implementation GPUImageLowPassFilter
@synthesize filterStrength;
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
// Take in the frame and blend it with the previous one
dissolveBlendFilter = [[GPUImageDissolveBlendFilter alloc] init];
[self addFilter:dissolveBlendFilter];
// Buffer the result to be fed back into the blend
bufferFilter = [[GPUImageBuffer alloc] init];
[self addFilter:bufferFilter];
// Texture location 0 needs to be the original image for the dissolve blend
[bufferFilter addTarget:dissolveBlendFilter atTextureLocation:1];
[dissolveBlendFilter addTarget:bufferFilter];
[dissolveBlendFilter disableSecondFrameCheck];
// To prevent double updating of this filter, disable updates from the sharp image side
// self.inputFilterToIgnoreForUpdates = unsharpMaskFilter;
self.initialFilters = [NSArray arrayWithObject:dissolveBlendFilter];
self.terminalFilter = dissolveBlendFilter;
self.filterStrength = 0.5;
return self;
}
#pragma mark -
#pragma mark Accessors
- (void)setFilterStrength:(CGFloat)newValue;
{
dissolveBlendFilter.mix = newValue;
}
- (CGFloat)filterStrength;
{
return dissolveBlendFilter.mix;
}
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
[self.terminalFilter addTarget:newTarget atTextureLocation:textureLocation];
//if use GPUImagePipline,will cause self.termainlFilter removeAllTargets,so need add bufferFilter back
if (self.terminalFilter == dissolveBlendFilter && ![self.terminalFilter.targets containsObject:bufferFilter]) {
[self.terminalFilter addTarget:bufferFilter atTextureLocation:1];
}
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 418 |
```objective-c
#import "GPUImageFilter.h"
extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
@interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
{
GLint texelWidthUniform, texelHeightUniform;
CGFloat texelWidth, texelHeight;
BOOL hasOverriddenImageSizeFactor;
}
// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 137 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.