text stringlengths 9 39.2M | dir stringlengths 25 226 | lang stringclasses 163 values | created_date timestamp[s] | updated_date timestamp[s] | repo_name stringclasses 751 values | repo_full_name stringclasses 752 values | star int64 1.01k 183k | len_tokens int64 1 18.5M |
|---|---|---|---|---|---|---|---|---|
```objective-c
#import "GPUImageLightenBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader for the lighten blend mode: the output is the
// per-channel maximum of the two input textures. ES requires precision
// qualifiers, hence the highp varyings and lowp samples.
NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = max(textureColor, textureColor2);
}
);
#else
// Desktop OpenGL variant of the same shader; no precision qualifiers are
// needed (or allowed) there.
NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = max(textureColor, textureColor2);
}
);
#endif
@implementation GPUImageLightenBlendFilter

// Initializes the filter with the lighten-blend fragment shader; all other
// behavior comes from the two-input superclass.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLightenBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 297 |
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageColorPackingFilter : GPUImageFilter
{
GLint texelWidthUniform, texelHeightUniform; // handles for the shader's texel-size uniforms
CGFloat texelWidth, texelHeight; // per-texel step sizes fed to those uniforms; presumably derived from the input size — confirm in the .m
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 47 |
```objective-c
#import "GPUImageSobelEdgeDetectionFilter.h"
// Edge detection reusing the Sobel filter's pipeline; no new state or methods
// are declared here, so the Prewitt-specific behavior presumably lives in a
// shader substitution in the .m — confirm there.
@interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 32 |
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
// For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
// This extends out bright features, and is most commonly used with black-and-white thresholded images.
// One vertex shader per supported radius; their definitions live in the .m.
extern NSString *const kGPUImageDilationRadiusOneVertexShaderString;
extern NSString *const kGPUImageDilationRadiusTwoVertexShaderString;
extern NSString *const kGPUImageDilationRadiusThreeVertexShaderString;
extern NSString *const kGPUImageDilationRadiusFourVertexShaderString;
@interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter
// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)dilationRadius;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDilationFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 184 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input blend filter; per its name this applies a hue blend of the second
// input over the first — the shader implementing it is in the .m.
@interface GPUImageHueBlendFilter : GPUImageTwoInputFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 26 |
```objective-c
#import "GPUImageiOSBlurFilter.h"
#import "GPUImageSaturationFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageLuminanceRangeFilter.h"
@implementation GPUImageiOSBlurFilter
@synthesize blurRadiusInPixels;
@synthesize saturation;
@synthesize downsampling = _downsampling;
#pragma mark -
#pragma mark Initialization and teardown
// Builds the three-stage pipeline (desaturate -> Gaussian blur -> luminance
// range) and applies default parameters that approximate the iOS 7 control
// center blur.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
// First pass: downsample and desaturate
saturationFilter = [[GPUImageSaturationFilter alloc] init];
[self addFilter:saturationFilter];
// Second pass: apply a strong Gaussian blur
blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
[self addFilter:blurFilter];
// Third pass: upsample and adjust luminance range
luminanceRangeFilter = [[GPUImageLuminanceRangeFilter alloc] init];
[self addFilter:luminanceRangeFilter];
[saturationFilter addTarget:blurFilter];
[blurFilter addTarget:luminanceRangeFilter];
self.initialFilters = [NSArray arrayWithObject:saturationFilter];
self.terminalFilter = luminanceRangeFilter;
self.blurRadiusInPixels = 12.0;
self.saturation = 0.8;
self.downsampling = 4.0;
self.rangeReductionFactor = 0.6;
return self;
}
// Forces the first two stages to process at a reduced size (rotation-aware)
// when downsampling is active; the final stage restores the full size.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
if (_downsampling > 1.0)
{
CGSize rotatedSize = [saturationFilter rotatedSize:newSize forIndex:textureIndex];
[saturationFilter forceProcessingAtSize:CGSizeMake(rotatedSize.width / _downsampling, rotatedSize.height / _downsampling)];
[luminanceRangeFilter forceProcessingAtSize:rotatedSize];
}
[super setInputSize:newSize atIndex:textureIndex];
}
#pragma mark -
#pragma mark Accessors
// From Apple's UIImage+ImageEffects category:
// A description of how to compute the box kernel width from the Gaussian
// radius (aka standard deviation) appears in the SVG spec:
// path_to_url#feGaussianBlurElement
//
// For larger values of 's' (s >= 2.0), an approximation can be used: Three
// successive box-blurs build a piece-wise quadratic convolution kernel, which
// approximates the Gaussian kernel to within roughly 3%.
//
// let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
//
// ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
// The following accessors simply forward to the relevant internal filter;
// this group holds no parameter state of its own apart from _downsampling.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
blurFilter.blurRadiusInPixels = newValue;
}
- (CGFloat)blurRadiusInPixels;
{
return blurFilter.blurRadiusInPixels;
}
- (void)setSaturation:(CGFloat)newValue;
{
saturationFilter.saturation = newValue;
}
- (CGFloat)saturation;
{
return saturationFilter.saturation;
}
// NOTE(review): changing downsampling after input sizes have been set does not
// re-force the processing sizes until the next setInputSize:atIndex: call.
- (void)setDownsampling:(CGFloat)newValue;
{
_downsampling = newValue;
}
- (void)setRangeReductionFactor:(CGFloat)rangeReductionFactor
{
luminanceRangeFilter.rangeReductionFactor = rangeReductionFactor;
}
- (CGFloat)rangeReductionFactor
{
return luminanceRangeFilter.rangeReductionFactor;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 736 |
```objective-c
#import "GPUImageSphereRefractionFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: refracts the image through a virtual sphere of
// the given radius about `center`. Pixels outside the sphere are zeroed via
// the step() mask; aspectRatio corrects the y coordinate so the sphere is
// round on non-square images.
NSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp vec2 center;
uniform highp float radius;
uniform highp float aspectRatio;
uniform highp float refractiveIndex;
void main()
{
highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp float distanceFromCenter = distance(center, textureCoordinateToUse);
lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
distanceFromCenter = distanceFromCenter / radius;
highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
highp vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;
}
);
#else
// Desktop OpenGL variant of the same shader (no precision qualifiers).
NSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec2 center;
uniform float radius;
uniform float aspectRatio;
uniform float refractiveIndex;
void main()
{
vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
float distanceFromCenter = distance(center, textureCoordinateToUse);
float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
distanceFromCenter = distanceFromCenter / radius;
float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;
}
);
#endif
// Private API: aspectRatio is recomputed internally from the input size and
// rotation; it is not part of the public header.
@interface GPUImageSphereRefractionFilter ()
- (void)adjustAspectRatio;
@property (readwrite, nonatomic) CGFloat aspectRatio;
@end
@implementation GPUImageSphereRefractionFilter
@synthesize center = _center;
@synthesize radius = _radius;
@synthesize aspectRatio = _aspectRatio;
@synthesize refractiveIndex = _refractiveIndex;
#pragma mark -
#pragma mark Initialization and teardown
- (id)init;
{
if (!(self = [self initWithFragmentShaderFromString:kGPUImageSphereRefractionFragmentShaderString]))
{
return nil;
}
return self;
}
// Designated setup: caches the uniform handles, then applies the defaults
// (radius 0.25, centered, refractive index 0.71, transparent background).
// Uniform handles must be fetched before the property setters run, since the
// setters upload straight to the program.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
{
return nil;
}
radiusUniform = [filterProgram uniformIndex:@"radius"];
aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
centerUniform = [filterProgram uniformIndex:@"center"];
refractiveIndexUniform = [filterProgram uniformIndex:@"refractiveIndex"];
self.radius = 0.25;
self.center = CGPointMake(0.5, 0.5);
self.refractiveIndex = 0.71;
[self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0];
return self;
}
// Keeps the aspect-ratio uniform in sync whenever the input size actually
// changes (and is non-zero).
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
CGSize oldInputSize = inputTextureSize;
[super setInputSize:newSize atIndex:textureIndex];
if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
{
[self adjustAspectRatio];
}
}
#pragma mark -
#pragma mark Accessors
// Recomputes height/width (swapped when the rotation mode transposes the
// image) and pushes it to the shader.
- (void)adjustAspectRatio;
{
if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
{
[self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
}
else
{
[self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
}
}
// Rotation changes both where the center lands and the effective aspect
// ratio, so re-apply both.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
[super setInputRotation:newInputRotation atIndex:textureIndex];
[self setCenter:self.center];
[self adjustAspectRatio];
}
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
[super forceProcessingAtSize:frameSize];
[self adjustAspectRatio];
}
- (void)setRadius:(CGFloat)newValue;
{
_radius = newValue;
[self setFloat:_radius forUniform:radiusUniform program:filterProgram];
}
// The stored center is in unrotated coordinates; the rotated equivalent is
// what gets uploaded to the shader.
- (void)setCenter:(CGPoint)newValue;
{
_center = newValue;
CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
[self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
}
- (void)setAspectRatio:(CGFloat)newValue;
{
_aspectRatio = newValue;
[self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];
}
- (void)setRefractiveIndex:(CGFloat)newValue;
{
_refractiveIndex = newValue;
[self setFloat:_refractiveIndex forUniform:refractiveIndexUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,243 |
```objective-c
#import "GPUImageGrayscaleFilter.h"
@implementation GPUImageGrayscaleFilter
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Converts each pixel to its luminance using the Rec. 709 weights
// (0.2125, 0.7154, 0.0721), preserving alpha. OpenGL ES variant.
NSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, W);
gl_FragColor = vec4(vec3(luminance), textureColor.a);
}
);
#else
// Desktop OpenGL variant of the same luminance shader.
NSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, W);
gl_FragColor = vec4(vec3(luminance), textureColor.a);
}
);
#endif
// Skips rendering entirely when the input is already monochrome (a pass-
// through optimization; with wantsMonochromeInput returning NO below, the
// flag is currently never set, so this always renders).
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
if (!currentlyReceivingMonochromeInput)
{
[super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
}
}
//- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
//{
// [super setInputTexture:newInputTexture atIndex:textureIndex];
// if (currentlyReceivingMonochromeInput)
// {
// [self notifyTargetsAboutNewOutputTexture];
// }
//}
//- (GLuint)textureForOutput;
//{
// if (currentlyReceivingMonochromeInput)
// {
// return filterSourceTexture;
// }
// else
// {
// return outputTexture;
// }
//}
// Monochrome fast-path negotiation is disabled for now; the YES returns are
// kept commented out pending the rewrite noted below.
- (BOOL)wantsMonochromeInput;
{
// return YES;
return NO;
}
- (BOOL)providesMonochromeOutput;
{
// return YES;
return NO;
}
// TODO: Rewrite this based on the new GPUImageFilter implementation
//- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
//{
// if (self.frameProcessingCompletionBlock != NULL)
// {
// self.frameProcessingCompletionBlock(self, frameTime);
// }
//
// for (id<GPUImageInput> currentTarget in targets)
// {
// if (currentTarget != self.targetToIgnoreForUpdates)
// {
// NSInteger indexOfObject = [targets indexOfObject:currentTarget];
// NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
//
// if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage)
// {
// [self setInputTextureForTarget:currentTarget atIndex:textureIndex];
// }
//
// if (currentlyReceivingMonochromeInput)
// {
// [currentTarget setInputRotation:inputRotation atIndex:textureIndex];
//
// CGSize sizeToRotate = [self outputFrameSize];
// CGSize rotatedSize = sizeToRotate;
// if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
// {
// rotatedSize.width = sizeToRotate.height;
// rotatedSize.height = sizeToRotate.width;
// }
// [currentTarget setInputSize:rotatedSize atIndex:textureIndex];
// }
// else
// {
// [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex];
// }
// [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];
// }
// }
//}
#pragma mark -
#pragma mark Initialization and teardown
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString]))
{
return nil;
}
return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 870 |
```objective-c
#import "GPUImageThresholdEdgeDetectionFilter.h"
@implementation GPUImageThresholdEdgeDetectionFilter
// Invert the colorspace for a sketch
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: a reduced central-difference gradient (note the h/v
// expressions algebraically simplify to bottom-top and right-left), whose
// magnitude is scaled by edgeStrength and binarized against `threshold`.
// The commented-out lines are earlier Sobel-style experiments kept for
// reference.
NSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float threshold;
uniform float edgeStrength;
void main()
{
// float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
// float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
// float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
// float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
// float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
// float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
// float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + leftIntensity + 2.0 * centerIntensity + rightIntensity;
// float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomIntensity + 2.0 * centerIntensity + topIntensity;
float h = (centerIntensity - topIntensity) + (bottomIntensity - centerIntensity);
float v = (centerIntensity - leftIntensity) + (rightIntensity - centerIntensity);
// float h = (centerIntensity - topIntensity);
// float j = (topIntensity - centerIntensity);
// h = max(h,j);
// j = abs(h);
// float v = (centerIntensity - leftIntensity);
float mag = length(vec2(h, v)) * edgeStrength;
mag = step(threshold, mag);
// float mag = abs(h);
// gl_FragColor = vec4(h, h, h, 1.0);
// gl_FragColor = vec4(texture2D(inputImageTexture, textureCoordinate));
// gl_FragColor = vec4(h, centerIntensity, j, 1.0);
gl_FragColor = vec4(mag, mag, mag, 1.0);
}
);
#else
// Desktop OpenGL variant: a full Sobel gradient (clamped to non-negative
// components) followed by the same edgeStrength scaling and thresholding.
// NOTE(review): this branch intentionally differs from the ES branch's
// central-difference formulation — confirm whether that divergence is wanted.
NSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform float threshold;
uniform float edgeStrength;
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
h = max(0.0, h);
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
v = max(0.0, v);
float mag = length(vec2(h, v)) * edgeStrength;
mag = step(threshold, mag);
gl_FragColor = vec4(vec3(mag), 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
@synthesize threshold = _threshold;
// Designated setup: the threshold uniform belongs to the second pass of the
// inherited two-pass pipeline (secondFilterProgram), not the first.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
{
return nil;
}
thresholdUniform = [secondFilterProgram uniformIndex:@"threshold"];
self.threshold = 0.25;
self.edgeStrength = 1.0;
return self;
}
- (id)init;
{
if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdEdgeDetectionFragmentShaderString]))
{
return nil;
}
return self;
}
#pragma mark -
#pragma mark Accessors
// Gradient magnitudes at or above this value become white; all else black.
- (void)setThreshold:(CGFloat)newValue;
{
_threshold = newValue;
[self setFloat:_threshold forUniform:thresholdUniform program:secondFilterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,264 |
```objective-c
#import "GPUImageSobelEdgeDetectionFilter.h"
/** Converts video to look like a sketch.
This is just the Sobel edge detection filter with the colors inverted.
*/
// Declares no new state or methods: the sketch look comes entirely from the
// inverted-color shader this subclass supplies (in the .m, not visible here).
@interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSketchFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 55 |
```objective-c
#import "GPUImagePerlinNoiseFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: generates classic 2D Perlin noise (Ashima Arts'
// textureless GLSL implementation, credited below) at `scale` and maps the
// [-1, 1] noise value to a blend between colorStart and colorFinish.
NSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING
(
precision highp float;
varying highp vec2 textureCoordinate;
uniform float scale;
uniform vec4 colorStart;
uniform vec4 colorFinish;
//
// Description : Array and textureless GLSL 2D/3D/4D simplex
// noise functions.
// Author : Ian McEwan, Ashima Arts.
// Maintainer : ijm
// Lastmod : 20110822 (ijm)
// path_to_url
//
vec4 mod289(vec4 x)
{
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec4 permute(vec4 x)
{
return mod289(((x*34.0)+1.0)*x);
}
vec4 taylorInvSqrt(vec4 r)
{
return 1.79284291400159 - 0.85373472095314 * r;
}
vec2 fade(vec2 t) {
return t*t*t*(t*(t*6.0-15.0)+10.0);
}
// Classic Perlin noise
float cnoise(vec2 P)
{
vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0);
vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0);
Pi = mod289(Pi); // To avoid truncation effects in permutation
vec4 ix = Pi.xzxz;
vec4 iy = Pi.yyww;
vec4 fx = Pf.xzxz;
vec4 fy = Pf.yyww;
vec4 i = permute(permute(ix) + iy);
vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ;
vec4 gy = abs(gx) - 0.5 ;
vec4 tx = floor(gx + 0.5);
gx = gx - tx;
vec2 g00 = vec2(gx.x,gy.x);
vec2 g10 = vec2(gx.y,gy.y);
vec2 g01 = vec2(gx.z,gy.z);
vec2 g11 = vec2(gx.w,gy.w);
vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11)));
g00 *= norm.x;
g01 *= norm.y;
g10 *= norm.z;
g11 *= norm.w;
float n00 = dot(g00, vec2(fx.x, fy.x));
float n10 = dot(g10, vec2(fx.y, fy.y));
float n01 = dot(g01, vec2(fx.z, fy.z));
float n11 = dot(g11, vec2(fx.w, fy.w));
vec2 fade_xy = fade(Pf.xy);
vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x);
float n_xy = mix(n_x.x, n_x.y, fade_xy.y);
return 2.3 * n_xy;
}
void main()
{
float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0;
vec4 colorDiff = colorFinish - colorStart;
vec4 color = colorStart + colorDiff * n1;
gl_FragColor = color;
}
);
#else
// Desktop OpenGL variant: identical noise implementation without the ES
// precision qualifiers.
NSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform float scale;
uniform vec4 colorStart;
uniform vec4 colorFinish;
//
// Description : Array and textureless GLSL 2D/3D/4D simplex
// noise functions.
// Author : Ian McEwan, Ashima Arts.
// Maintainer : ijm
// Lastmod : 20110822 (ijm)
// path_to_url
//
vec4 mod289(vec4 x)
{
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec4 permute(vec4 x)
{
return mod289(((x*34.0)+1.0)*x);
}
vec4 taylorInvSqrt(vec4 r)
{
return 1.79284291400159 - 0.85373472095314 * r;
}
vec2 fade(vec2 t) {
return t*t*t*(t*(t*6.0-15.0)+10.0);
}
// Classic Perlin noise
float cnoise(vec2 P)
{
vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0);
vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0);
Pi = mod289(Pi); // To avoid truncation effects in permutation
vec4 ix = Pi.xzxz;
vec4 iy = Pi.yyww;
vec4 fx = Pf.xzxz;
vec4 fy = Pf.yyww;
vec4 i = permute(permute(ix) + iy);
vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ;
vec4 gy = abs(gx) - 0.5 ;
vec4 tx = floor(gx + 0.5);
gx = gx - tx;
vec2 g00 = vec2(gx.x,gy.x);
vec2 g10 = vec2(gx.y,gy.y);
vec2 g01 = vec2(gx.z,gy.z);
vec2 g11 = vec2(gx.w,gy.w);
vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11)));
g00 *= norm.x;
g01 *= norm.y;
g10 *= norm.z;
g11 *= norm.w;
float n00 = dot(g00, vec2(fx.x, fy.x));
float n10 = dot(g10, vec2(fx.y, fy.y));
float n01 = dot(g01, vec2(fx.z, fy.z));
float n11 = dot(g11, vec2(fx.w, fy.w));
vec2 fade_xy = fade(Pf.xy);
vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x);
float n_xy = mix(n_x.x, n_x.y, fade_xy.y);
return 2.3 * n_xy;
}
void main()
{
float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0;
vec4 colorDiff = colorFinish - colorStart;
vec4 color = colorStart + colorDiff * n1;
gl_FragColor = color;
}
);
#endif
@implementation GPUImagePerlinNoiseFilter

@synthesize scale = _scale, colorStart = _colorStart, colorFinish = _colorFinish;

#pragma mark -
#pragma mark Initialization and teardown

// Compiles the Perlin noise shader, caches its uniform handles, and applies
// the defaults: scale 8 with a black-to-white gradient. The handles must be
// looked up before the property setters run, since each setter uploads
// straight to the program.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImagePerlinNoiseFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    scaleUniform = [filterProgram uniformIndex:@"scale"];
    colorStartUniform = [filterProgram uniformIndex:@"colorStart"];
    colorFinishUniform = [filterProgram uniformIndex:@"colorFinish"];

    self.scale = 8.0;
    self.colorStart = (GPUVector4){0.0, 0.0, 0.0, 1.0};
    self.colorFinish = (GPUVector4){1.0, 1.0, 1.0, 1.0};

    return self;
}

#pragma mark -
#pragma mark Accessors

// Each setter stores the value and pushes it to the shader immediately.
- (void)setScale:(float)scale
{
    _scale = scale;
    [self setFloat:_scale forUniform:scaleUniform program:filterProgram];
}

- (void)setColorStart:(GPUVector4)colorStart
{
    _colorStart = colorStart;
    [self setVec4:_colorStart forUniform:colorStartUniform program:filterProgram];
}

- (void)setColorFinish:(GPUVector4)colorFinish
{
    _colorFinish = colorFinish;
    [self setVec4:_colorFinish forUniform:colorFinishUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,947 |
```objective-c
//
// GPUImageMovieComposition.h
// Givit
//
// Created by Sean Meiners on 2013/01/25.
//
//
#import "GPUImageMovie.h"
// Movie source driven by an AVComposition (plus optional video composition
// and audio mix) instead of a plain file URL.
@interface GPUImageMovieComposition : GPUImageMovie
// NOTE(review): "compositon" is misspelled, but it is part of the published
// API surface (property name and initializer keyword); renaming would break
// existing callers and KVC access, so it is left as-is.
@property (readwrite, retain) AVComposition *compositon;
@property (readwrite, retain) AVVideoComposition *videoComposition;
@property (readwrite, retain) AVAudioMix *audioMix;
- (id)initWithComposition:(AVComposition*)compositon
andVideoComposition:(AVVideoComposition*)videoComposition
andAudioMix:(AVAudioMix*)audioMix;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMovieComposition.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 133 |
```objective-c
#import "GPUImageHighPassFilter.h"
@implementation GPUImageHighPassFilter

@synthesize filterStrength;

// Builds the high-pass pipeline: a low pass filter accumulates the slowly
// changing component of the stream, and a difference blend subtracts that
// from the incoming frame, leaving only the rapidly changing part.
- (id)init;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Start with a low pass filter to define the component to be removed
    lowPassFilter = [[GPUImageLowPassFilter alloc] init];
    [self addFilter:lowPassFilter];

    // Take the difference of the current frame from the low pass filtered result to get the high pass
    differenceBlendFilter = [[GPUImageDifferenceBlendFilter alloc] init];
    [self addFilter:differenceBlendFilter];

    // Texture location 0 needs to be the original image for the difference blend
    [lowPassFilter addTarget:differenceBlendFilter atTextureLocation:1];

    self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, differenceBlendFilter, nil];
    self.terminalFilter = differenceBlendFilter;

    self.filterStrength = 0.5;

    return self;
}

#pragma mark -
#pragma mark Accessors

// Strength is owned by the internal low pass filter; these simply forward.
- (void)setFilterStrength:(CGFloat)newValue;
{
    lowPassFilter.filterStrength = newValue;
}

- (CGFloat)filterStrength;
{
    return lowPassFilter.filterStrength;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 264 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImagePicture;
/** A photo filter based on Soft Elegance Photoshop action
path_to_url
*/
// Note: If you want to use this effect you have to add
// lookup_soft_elegance_1.png and lookup_soft_elegance_2.png
// from Resources folder to your application bundle.
@interface GPUImageSoftEleganceFilter : GPUImageFilterGroup
{
GPUImagePicture *lookupImageSource1;
GPUImagePicture *lookupImageSource2;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 116 |
```objective-c
#import "GPUImageOutput.h"
#import "GPUImageMovieWriter.h"
#import "GPUImagePicture.h"
#import <mach/mach.h>
// Runs `block` on the main queue, executing it inline when already on the
// main thread — a dispatch_sync to the main queue from the main thread would
// deadlock.
void runOnMainQueueWithoutDeadlocking(void (^block)(void))
{
    if (![NSThread isMainThread])
    {
        dispatch_sync(dispatch_get_main_queue(), block);
    }
    else
    {
        block();
    }
}
// Runs `block` synchronously on the shared video processing queue, executing
// it inline when already on that queue to avoid deadlocking dispatch_sync.
// Re-entrancy is detected via dispatch_get_specific on modern SDKs; the
// deprecated dispatch_get_current_queue is used only where OS_OBJECT_USE_OBJC
// is unavailable.
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
#if !OS_OBJECT_USE_OBJC
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_sync(videoProcessingQueue, block);
}
}
// Asynchronous counterpart of runSynchronouslyOnVideoProcessingQueue: runs
// the block inline when already on the shared video processing queue,
// otherwise dispatch_async's it there.
void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
#if !OS_OBJECT_USE_OBJC
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_async(videoProcessingQueue, block);
}
}
// Like runSynchronouslyOnVideoProcessingQueue, but targets the queue of a
// specific GPUImageContext rather than the shared one.
// NOTE(review): the dispatch_get_specific check uses the global context key,
// so it detects "on some GPUImage context queue", not necessarily `context`'s
// queue — confirm this matches callers' expectations.
void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [context contextQueue];
#if !OS_OBJECT_USE_OBJC
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_sync(videoProcessingQueue, block);
}
}
// Asynchronous counterpart of runSynchronouslyOnContextQueue: runs the block
// inline when already on a GPUImage context queue, otherwise dispatch_async's
// it onto `context`'s queue.
void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [context contextQueue];
#if !OS_OBJECT_USE_OBJC
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_async(videoProcessingQueue, block);
}
}
// Logs the process's resident memory size in bytes, prefixed with `tag`
// (or "Default" when tag is nil). Logs the Mach error string if the
// task_info() query fails.
void reportAvailableMemoryForGPUImage(NSString *tag)
{
    if (!tag)
        tag = @"Default";

    struct task_basic_info info;
    // Use the documented count (in natural_t units) rather than sizeof(info),
    // which is a byte count and only worked by accident.
    mach_msg_type_number_t size = TASK_BASIC_INFO_COUNT;
    kern_return_t kerr = task_info(mach_task_self(),
                                   TASK_BASIC_INFO,
                                   (task_info_t)&info,
                                   &size);
    if( kerr == KERN_SUCCESS ) {
        // Cast to unsigned long long: the previous (unsigned int) cast with %u
        // truncated resident sizes above 4 GB.
        NSLog(@"%@ - Memory used: %llu", tag, (unsigned long long)info.resident_size); //in bytes
    } else {
        NSLog(@"%@ - Error: %s", tag, mach_error_string(kerr));
    }
}
@implementation GPUImageOutput
@synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;
@synthesize shouldIgnoreUpdatesToThisTarget = _shouldIgnoreUpdatesToThisTarget;
@synthesize audioEncodingTarget = _audioEncodingTarget;
@synthesize targetToIgnoreForUpdates = _targetToIgnoreForUpdates;
@synthesize frameProcessingCompletionBlock = _frameProcessingCompletionBlock;
@synthesize enabled = _enabled;
@synthesize outputTextureOptions = _outputTextureOptions;
#pragma mark -
#pragma mark Initialization and teardown
// Creates an output with empty target lists, processing enabled, and the
// default texture options: GL_LINEAR filtering, GL_CLAMP_TO_EDGE wrapping,
// RGBA internal format with BGRA byte order.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
targets = [[NSMutableArray alloc] init];
targetTextureIndices = [[NSMutableArray alloc] init];
_enabled = YES;
// Starts YES and is AND-ed down as targets are added (see addTarget:atTextureLocation:).
allTargetsWantMonochromeData = YES;
usingNextFrameForImageCapture = NO;
// set default texture options
_outputTextureOptions.minFilter = GL_LINEAR;
_outputTextureOptions.magFilter = GL_LINEAR;
_outputTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
_outputTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
_outputTextureOptions.internalFormat = GL_RGBA;
_outputTextureOptions.format = GL_BGRA;
_outputTextureOptions.type = GL_UNSIGNED_BYTE;
return self;
}
- (void)dealloc
{
[self removeAllTargets];
}
#pragma mark -
#pragma mark Managing targets
- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
{
[target setInputFramebuffer:[self framebufferForOutput] atIndex:inputTextureIndex];
}
- (GPUImageFramebuffer *)framebufferForOutput;
{
return outputFramebuffer;
}
- (void)removeOutputFramebuffer;
{
outputFramebuffer = nil;
}
- (void)notifyTargetsAboutNewOutputTexture;
{
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
}
}
- (NSArray*)targets;
{
return [NSArray arrayWithArray:targets];
}
- (void)addTarget:(id<GPUImageInput>)newTarget;
{
NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex];
[self addTarget:newTarget atTextureLocation:nextAvailableTextureIndex];
if ([newTarget shouldIgnoreUpdatesToThisTarget])
{
_targetToIgnoreForUpdates = newTarget;
}
}
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
if([targets containsObject:newTarget])
{
return;
}
cachedMaximumOutputSize = CGSizeZero;
runSynchronouslyOnVideoProcessingQueue(^{
[self setInputFramebufferForTarget:newTarget atIndex:textureLocation];
[targets addObject:newTarget];
[targetTextureIndices addObject:[NSNumber numberWithInteger:textureLocation]];
allTargetsWantMonochromeData = allTargetsWantMonochromeData && [newTarget wantsMonochromeInput];
});
}
- (void)removeTarget:(id<GPUImageInput>)targetToRemove;
{
if(![targets containsObject:targetToRemove])
{
return;
}
if (_targetToIgnoreForUpdates == targetToRemove)
{
_targetToIgnoreForUpdates = nil;
}
cachedMaximumOutputSize = CGSizeZero;
NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
runSynchronouslyOnVideoProcessingQueue(^{
[targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
[targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
[targetTextureIndices removeObjectAtIndex:indexOfObject];
[targets removeObject:targetToRemove];
[targetToRemove endProcessing];
});
}
- (void)removeAllTargets;
{
cachedMaximumOutputSize = CGSizeZero;
runSynchronouslyOnVideoProcessingQueue(^{
for (id<GPUImageInput> targetToRemove in targets)
{
NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
[targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
}
[targets removeAllObjects];
[targetTextureIndices removeAllObjects];
allTargetsWantMonochromeData = YES;
});
}
#pragma mark -
#pragma mark Manage the output texture
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
}
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
{
}
#pragma mark -
#pragma mark Still image processing
- (void)useNextFrameForImageCapture;
{
}
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
{
return nil;
}
- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;
{
GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithCGImage:imageToFilter];
[self useNextFrameForImageCapture];
[stillImageSource addTarget:(id<GPUImageInput>)self];
[stillImageSource processImage];
CGImageRef processedImage = [self newCGImageFromCurrentlyProcessedOutput];
[stillImageSource removeTarget:(id<GPUImageInput>)self];
return processedImage;
}
- (BOOL)providesMonochromeOutput;
{
return NO;
}
#pragma mark -
#pragma mark Platform-specific image output methods
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- (UIImage *)imageFromCurrentFramebuffer;
{
UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation];
UIImageOrientation imageOrientation = UIImageOrientationLeft;
switch (deviceOrientation)
{
case UIDeviceOrientationPortrait:
imageOrientation = UIImageOrientationUp;
break;
case UIDeviceOrientationPortraitUpsideDown:
imageOrientation = UIImageOrientationDown;
break;
case UIDeviceOrientationLandscapeLeft:
imageOrientation = UIImageOrientationLeft;
break;
case UIDeviceOrientationLandscapeRight:
imageOrientation = UIImageOrientationRight;
break;
default:
imageOrientation = UIImageOrientationUp;
break;
}
return [self imageFromCurrentFramebufferWithOrientation:imageOrientation];
}
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
{
CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
UIImage *finalImage = [UIImage imageWithCGImage:cgImageFromBytes scale:1.0 orientation:imageOrientation];
CGImageRelease(cgImageFromBytes);
return finalImage;
}
- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
{
CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];
UIImage *processedImage = [UIImage imageWithCGImage:image scale:[imageToFilter scale] orientation:[imageToFilter imageOrientation]];
CGImageRelease(image);
return processedImage;
}
- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter
{
return [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];
}
#else
- (NSImage *)imageFromCurrentFramebuffer;
{
return [self imageFromCurrentFramebufferWithOrientation:UIImageOrientationLeft];
}
- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
{
CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
NSImage *finalImage = [[NSImage alloc] initWithCGImage:cgImageFromBytes size:NSZeroSize];
CGImageRelease(cgImageFromBytes);
return finalImage;
}
- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
{
CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
NSImage *processedImage = [[NSImage alloc] initWithCGImage:image size:NSZeroSize];
CGImageRelease(image);
return processedImage;
}
- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter
{
return [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
}
#endif
#pragma mark -
#pragma mark Accessors
- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
_audioEncodingTarget = newValue;
if( ! _audioEncodingTarget.hasAudioTrack )
{
_audioEncodingTarget.hasAudioTrack = YES;
}
}
-(void)setOutputTextureOptions:(GPUTextureOptions)outputTextureOptions
{
_outputTextureOptions = outputTextureOptions;
if( outputFramebuffer.texture )
{
glBindTexture(GL_TEXTURE_2D, outputFramebuffer.texture);
//_outputTextureOptions.format
//_outputTextureOptions.internalFormat
//_outputTextureOptions.magFilter
//_outputTextureOptions.minFilter
//_outputTextureOptions.type
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _outputTextureOptions.wrapS);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _outputTextureOptions.wrapT);
glBindTexture(GL_TEXTURE_2D, 0);
}
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOutput.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,690 |
```objective-c
//
// GPUImageHistogramEqualizationFilter.h
// FilterShowcase
//
// Created by Adam Marcus on 19/08/2014.
//
#import "GPUImageFilterGroup.h"
#import "GPUImageHistogramFilter.h"
#import "GPUImageRawDataOutput.h"
#import "GPUImageRawDataInput.h"
#import "GPUImageTwoInputFilter.h"
// Filter group that performs histogram equalization: an internal histogram
// pass is read back on the CPU and fed back in as raw data for the final
// remapping stage.
@interface GPUImageHistogramEqualizationFilter : GPUImageFilterGroup
{
GPUImageHistogramFilter *histogramFilter;
GPUImageRawDataOutput *rawDataOutputFilter;
GPUImageRawDataInput *rawDataInputFilter;
}
// Pixel-sampling stride forwarded to the histogram stage; larger values
// sample fewer pixels when building the histogram.
@property(readwrite, nonatomic) NSUInteger downsamplingFactor;
// Designated setup: choose which channel(s) the histogram is computed over.
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 155 |
```objective-c
#import "GPUImageFilter.h"
// A filter that renders in two successive shader passes, holding a second
// GL program (with its own attributes, uniforms, and framebuffer) for the
// second stage.
@interface GPUImageTwoPassFilter : GPUImageFilter
{
GPUImageFramebuffer *secondOutputFramebuffer;
GLProgram *secondFilterProgram;
GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
NSMutableDictionary *secondProgramUniformStateRestorationBlocks;
}
// Initialization and teardown
// Full form: independent vertex + fragment shaders for each pass.
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
// Convenience form: fragment shaders only (default vertex shaders).
- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
// Hook for subclasses to bind extra attributes on the second program.
- (void)initializeSecondaryAttributes;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 181 |
```objective-c
#import "GPUImageRGBClosingFilter.h"
#import "GPUImageRGBErosionFilter.h"
#import "GPUImageRGBDilationFilter.h"
// Morphological closing on all RGB channels: a dilation stage followed by an
// erosion stage with the same structuring-element radius.
@implementation GPUImageRGBClosingFilter

// Default closing uses a 1-pixel radius.
- (id)init;
{
    self = [self initWithRadius:1];
    if (self == nil)
    {
        return nil;
    }
    return self;
}

// Builds the two-stage pipeline: dilate first, then erode the result.
- (id)initWithRadius:(NSUInteger)radius;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // First pass: dilation
    dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];

    // Second pass: erosion
    erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];

    // Wire the stages together and expose the group's entry/exit points.
    [dilationFilter addTarget:erosionFilter];
    self.initialFilters = @[dilationFilter];
    self.terminalFilter = erosionFilter;

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 217 |
```objective-c
#import "GPUImageHoughTransformLineDetector.h"
@interface GPUImageHoughTransformLineDetector()
- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime;
@end
// Detects straight lines via a Hough-style transform: Canny edge detection,
// a parallel-coordinate line transform of the edge points on the GPU, then
// thresholded non-maximum suppression. A CPU pass reads the surviving pixels
// back and converts them to (slope, intercept) pairs for linesDetectedBlock.
@implementation GPUImageHoughTransformLineDetector

@synthesize linesDetectedBlock;
@synthesize edgeThreshold;
@synthesize lineDetectionThreshold;
@synthesize intermediateImages = _intermediateImages;
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
// First pass: do edge detection and threshold that to just have white pixels for edges
// if ([GPUImageContext deviceSupportsFramebufferReads])
// if ([GPUImageContext deviceSupportsFramebufferReads])
// {
// thresholdEdgeDetectionFilter = [[GPUImageThresholdEdgeDetectionFilter alloc] init];
// thresholdEdgeDetectionFilter = [[GPUImageSobelEdgeDetectionFilter alloc] init];
// [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setThreshold:0.07];
// [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:0.25];
// [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:1.0];
// thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];
// }
// else
// {
thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];
// }
[self addFilter:thresholdEdgeDetectionFilter];
// Second pass: extract the white points and draw representative lines in parallel coordinate space
parallelCoordinateLineTransformFilter = [[GPUImageParallelCoordinateLineTransformFilter alloc] init];
[self addFilter:parallelCoordinateLineTransformFilter];
// Third pass: apply non-maximum suppression
// Devices that can read the framebuffer use the packed-colorspace variant.
if ([GPUImageContext deviceSupportsFramebufferReads])
{
nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:YES];
}
else
{
nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:NO];
}
[self addFilter:nonMaximumSuppressionFilter];
// __unsafe_unretained avoids a retain cycle through the completion block.
__unsafe_unretained GPUImageHoughTransformLineDetector *weakSelf = self;
#ifdef DEBUGLINEDETECTION
// Debug builds additionally capture the suppression stage's output image.
_intermediateImages = [[NSMutableArray alloc] init];
__unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages;
// __unsafe_unretained GPUImageOutput<GPUImageInput> *weakEdgeDetectionFilter = thresholdEdgeDetectionFilter;
// [thresholdEdgeDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
// [weakIntermediateImages removeAllObjects];
// UIImage *intermediateImage = [weakEdgeDetectionFilter imageFromCurrentFramebuffer];
// [weakIntermediateImages addObject:intermediateImage];
// }];
//
// __unsafe_unretained GPUImageOutput<GPUImageInput> *weakParallelCoordinateLineTransformFilter = parallelCoordinateLineTransformFilter;
// [parallelCoordinateLineTransformFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
// UIImage *intermediateImage = [weakParallelCoordinateLineTransformFilter imageFromCurrentFramebuffer];
// [weakIntermediateImages addObject:intermediateImage];
// }];
__unsafe_unretained GPUImageOutput<GPUImageInput> *weakNonMaximumSuppressionFilter = nonMaximumSuppressionFilter;
[nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
UIImage *intermediateImage = [weakNonMaximumSuppressionFilter imageFromCurrentFramebuffer];
[weakIntermediateImages addObject:intermediateImage];
[weakSelf extractLineParametersFromImageAtFrameTime:frameTime];
}];
#else
// Release builds: extract line parameters as soon as suppression finishes.
[nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
[weakSelf extractLineParametersFromImageAtFrameTime:frameTime];
}];
#endif
[thresholdEdgeDetectionFilter addTarget:parallelCoordinateLineTransformFilter];
[parallelCoordinateLineTransformFilter addTarget:nonMaximumSuppressionFilter];
self.initialFilters = [NSArray arrayWithObjects:thresholdEdgeDetectionFilter, nil];
// self.terminalFilter = colorPackingFilter;
self.terminalFilter = nonMaximumSuppressionFilter;
// self.edgeThreshold = 0.95;
self.lineDetectionThreshold = 0.12;
return self;
}
// free(NULL) is a safe no-op, so this is fine even if no frame was processed.
- (void)dealloc;
{
free(rawImagePixels);
free(linesArray);
}
#pragma mark -
#pragma mark Corner extraction
// Reads back the non-maximum-suppressed framebuffer and converts each
// surviving pixel into a line in slope/intercept form, invoking
// linesDetectedBlock with the result. Capped at 1023 lines per frame.
- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime;
{
// we need a normal color texture for this filter
NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
NSUInteger numberOfLines = 0;
CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize;
unsigned int imageByteSize = imageSize.width * imageSize.height * 4;
// Lazily allocate the readback buffer and the (slope, intercept) pair
// storage (1024 pairs) on first use; reused across frames.
if (rawImagePixels == NULL)
{
rawImagePixels = (GLubyte *)malloc(imageByteSize);
linesArray = calloc(1024 * 2, sizeof(GLfloat));
}
glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
// imageWidth is the row stride in bytes (4 bytes per RGBA pixel).
unsigned int imageWidth = imageSize.width * 4;
unsigned int currentByte = 0;
unsigned int cornerStorageIndex = 0;
unsigned long lineStrengthCounter = 0;
// Scan the red channel of every pixel; any nonzero value is a detected
// peak in parallel-coordinate space.
while (currentByte < imageByteSize)
{
GLubyte colorByte = rawImagePixels[currentByte];
// NSLog(@"(%d,%d): [%d,%d,%d,%d]", xCoordinate, yCoordinate, rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);
// NSLog(@"[%d,%d,%d,%d]", rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);
if (colorByte > 0)
{
// xCoordinate is a byte offset within the row; yCoordinate is already
// in pixels/rows. The later division by 4 converts bytes to pixels.
unsigned int xCoordinate = currentByte % imageWidth;
unsigned int yCoordinate = currentByte / imageWidth;
lineStrengthCounter += colorByte;
// NSLog(@"(%d,%d): [%d,%d,%d,%d]", xCoordinate, yCoordinate, rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);
// Map pixel position into normalized [-1, 1] parallel-coordinate space.
CGFloat normalizedXCoordinate = -1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / imageSize.width;
CGFloat normalizedYCoordinate = -1.0 + 2.0 * (CGFloat)(yCoordinate) / imageSize.height;
if (normalizedXCoordinate < 0.0)
{
// T space
// m = -1 - d/u
// b = d * v/u
if (normalizedXCoordinate > -0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y
{
// 100000.0 acts as a sentinel slope for near-vertical lines.
linesArray[cornerStorageIndex++] = 100000.0;
linesArray[cornerStorageIndex++] = normalizedYCoordinate;
}
else
{
linesArray[cornerStorageIndex++] = -1.0 - 1.0 / normalizedXCoordinate;
linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate;
}
}
else
{
// S space
// m = 1 - d/u
// b = d * v/u
if (normalizedXCoordinate < 0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y
{
linesArray[cornerStorageIndex++] = 100000.0;
linesArray[cornerStorageIndex++] = normalizedYCoordinate;
}
else
{
linesArray[cornerStorageIndex++] = 1.0 - 1.0 / normalizedXCoordinate;
linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate;
}
}
// Clamp to the fixed 1024-pair buffer (keeps writes in bounds).
numberOfLines++;
numberOfLines = MIN(numberOfLines, 1023);
cornerStorageIndex = MIN(cornerStorageIndex, 2040);
}
currentByte +=4;
}
// CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
// NSLog(@"Processing time : %f ms", 1000.0 * currentFrameTime);
if (linesDetectedBlock != NULL)
{
linesDetectedBlock(linesArray, numberOfLines, frameTime);
}
}
- (BOOL)wantsMonochromeInput;
{
// return YES;
return NO;
}
#pragma mark -
#pragma mark Accessors
//- (void)setEdgeThreshold:(CGFloat)newValue;
//{
// [(GPUImageCannyEdgeDetectionFilter *)thresholdEdgeDetectionFilter setThreshold:newValue];
//}
//
//- (CGFloat)edgeThreshold;
//{
// return [(GPUImageCannyEdgeDetectionFilter *)thresholdEdgeDetectionFilter threshold];
//}
// Forwards the detection threshold to the suppression stage.
- (void)setLineDetectionThreshold:(CGFloat)newValue;
{
nonMaximumSuppressionFilter.threshold = newValue;
}
- (CGFloat)lineDetectionThreshold;
{
return nonMaximumSuppressionFilter.threshold;
}
#ifdef DEBUGLINEDETECTION
// Debug builds request a still-image capture from the suppression stage
// before each frame so intermediate output can be inspected.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
// [thresholdEdgeDetectionFilter useNextFrameForImageCapture];
// [parallelCoordinateLineTransformFilter useNextFrameForImageCapture];
[nonMaximumSuppressionFilter useNextFrameForImageCapture];
[super newFrameReadyAtTime:frameTime atIndex:textureIndex];
}
#endif
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,142 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/** Applies a color dodge blend of two images
*/
/** Applies a color dodge blend of two images.
 No additional state beyond GPUImageTwoInputFilter; the blend is defined
 entirely by this filter's fragment shader.
 */
@interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 40 |
```objective-c
#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
// OpenGL ES (iOS) shader variants: precision qualifiers are required.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Standard variant: keeps a pixel only if its red channel is a strict local
// maximum within its 3x3 neighborhood (with a tiebreaker favoring the
// upper-left neighbors) and exceeds `threshold`; output is binary.
NSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying highp vec2 textureCoordinate;
varying highp vec2 leftTextureCoordinate;
varying highp vec2 rightTextureCoordinate;
varying highp vec2 topTextureCoordinate;
varying highp vec2 topLeftTextureCoordinate;
varying highp vec2 topRightTextureCoordinate;
varying highp vec2 bottomTextureCoordinate;
varying highp vec2 bottomLeftTextureCoordinate;
varying highp vec2 bottomRightTextureCoordinate;
uniform lowp float threshold;
void main()
{
lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
// Use a tiebreaker for pixels to the left and immediately above this one
lowp float multiplier = 1.0 - step(centerColor.r, topColor);
multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
lowp float maxValue = max(centerColor.r, bottomColor);
maxValue = max(maxValue, bottomRightColor);
maxValue = max(maxValue, rightColor);
maxValue = max(maxValue, topRightColor);
lowp float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;
finalValue = step(threshold, finalValue);
gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);
//
// gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);
}
);
// Packed-colorspace variant: intensity is spread across the RGB channels
// (r + 256*g + 65536*b, see encodedIntensity) and suppression is done over a
// wider 5x5 neighborhood using explicit texel offsets.
// NOTE(review): the constant name below looks machine-mangled (likely
// originally a kGPUImage...PackedColorspace... name) — it is used
// consistently within this file, so renaming requires checking other
// translation units first.
NSString *const your_sha256_hashtShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying highp vec2 textureCoordinate;
varying highp vec2 leftTextureCoordinate;
varying highp vec2 rightTextureCoordinate;
varying highp vec2 topTextureCoordinate;
varying highp vec2 topLeftTextureCoordinate;
varying highp vec2 topRightTextureCoordinate;
varying highp vec2 bottomTextureCoordinate;
varying highp vec2 bottomLeftTextureCoordinate;
varying highp vec2 bottomRightTextureCoordinate;
uniform lowp float threshold;
uniform highp float texelWidth;
uniform highp float texelHeight;
highp float encodedIntensity(highp vec3 sourceColor)
{
return (sourceColor.b * 256.0 * 256.0 + sourceColor.g * 256.0 + sourceColor.r);
}
void main()
{
highp float bottomColor = encodedIntensity(texture2D(inputImageTexture, bottomTextureCoordinate).rgb);
highp float bottomLeftColor = encodedIntensity(texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb);
highp float bottomRightColor = encodedIntensity(texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb);
highp float centerColor = encodedIntensity(texture2D(inputImageTexture, textureCoordinate).rgb);
highp float leftColor = encodedIntensity(texture2D(inputImageTexture, leftTextureCoordinate).rgb);
highp float rightColor = encodedIntensity(texture2D(inputImageTexture, rightTextureCoordinate).rgb);
highp float topColor = encodedIntensity(texture2D(inputImageTexture, topTextureCoordinate).rgb);
highp float topRightColor = encodedIntensity(texture2D(inputImageTexture, topRightTextureCoordinate).rgb);
highp float topLeftColor = encodedIntensity(texture2D(inputImageTexture, topLeftTextureCoordinate).rgb);
highp float secondStageColor1 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, -2.0 * texelHeight)).rgb);
highp float secondStageColor2 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, -1.0 * texelHeight)).rgb);
highp float secondStageColor3 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 0.0)).rgb);
highp float secondStageColor4 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 1.0 * texelHeight)).rgb);
highp float secondStageColor5 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 2.0 * texelHeight)).rgb);
highp float secondStageColor6 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-1.0 * texelWidth, 2.0 * texelHeight)).rgb);
highp float secondStageColor7 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(0.0, 2.0 * texelHeight)).rgb);
highp float secondStageColor8 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(1.0 * texelWidth, 2.0 * texelHeight)).rgb);
highp float thirdStageColor1 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-1.0 * texelWidth, -2.0 * texelHeight)).rgb);
highp float thirdStageColor2 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(0.0, -2.0 * texelHeight)).rgb);
highp float thirdStageColor3 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(1.0 * texelWidth, -2.0 * texelHeight)).rgb);
highp float thirdStageColor4 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, -2.0 * texelHeight)).rgb);
highp float thirdStageColor5 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, -1.0 * texelHeight)).rgb);
highp float thirdStageColor6 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 0.0)).rgb);
highp float thirdStageColor7 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 1.0 * texelHeight)).rgb);
highp float thirdStageColor8 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 2.0 * texelHeight)).rgb);
// Use a tiebreaker for pixels to the left and immediately above this one
highp float multiplier = 1.0 - step(centerColor, topColor);
multiplier = multiplier * (1.0 - step(centerColor, topLeftColor));
multiplier = multiplier * (1.0 - step(centerColor, leftColor));
multiplier = multiplier * (1.0 - step(centerColor, bottomLeftColor));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor1));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor2));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor3));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor4));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor5));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor6));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor7));
multiplier = multiplier * (1.0 - step(centerColor, secondStageColor8));
highp float maxValue = max(centerColor, bottomColor);
maxValue = max(maxValue, bottomRightColor);
maxValue = max(maxValue, rightColor);
maxValue = max(maxValue, topRightColor);
maxValue = max(maxValue, thirdStageColor1);
maxValue = max(maxValue, thirdStageColor2);
maxValue = max(maxValue, thirdStageColor3);
maxValue = max(maxValue, thirdStageColor4);
maxValue = max(maxValue, thirdStageColor5);
maxValue = max(maxValue, thirdStageColor6);
maxValue = max(maxValue, thirdStageColor7);
maxValue = max(maxValue, thirdStageColor8);
highp float midValue = centerColor * step(maxValue, centerColor) * multiplier;
highp float finalValue = step(threshold, midValue);
gl_FragColor = vec4(finalValue * centerColor, topLeftColor, topRightColor, topColor);
}
);
#else
// Desktop OpenGL shader variants: no precision qualifiers.
NSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform float threshold;
void main()
{
float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
// Use a tiebreaker for pixels to the left and immediately above this one
float multiplier = 1.0 - step(centerColor.r, topColor);
multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
float maxValue = max(centerColor.r, bottomColor);
maxValue = max(maxValue, bottomRightColor);
maxValue = max(maxValue, rightColor);
maxValue = max(maxValue, topRightColor);
float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;
finalValue = step(threshold, finalValue);
gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);
//
// gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);
}
);
// NOTE(review): unlike the iOS branch, this desktop "packed colorspace"
// variant is byte-identical to the standard shader above (no encodedIntensity
// or 5x5 neighborhood) — confirm whether that is intentional or an omission.
NSString *const your_sha256_hashtShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform float threshold;
void main()
{
float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
// Use a tiebreaker for pixels to the left and immediately above this one
float multiplier = 1.0 - step(centerColor.r, topColor);
multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
float maxValue = max(centerColor.r, bottomColor);
maxValue = max(maxValue, bottomRightColor);
maxValue = max(maxValue, rightColor);
maxValue = max(maxValue, topRightColor);
float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;
finalValue = step(threshold, finalValue);
gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);
//
// gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);
}
);
#endif
// Non-maximum suppression with a binarizing threshold, selectable between
// the standard and packed-colorspace fragment shaders.
@implementation GPUImageThresholdedNonMaximumSuppressionFilter

@synthesize threshold = _threshold;

#pragma mark -
#pragma mark Initialization and teardown

// Convenience initializer: uses the standard (non-packed) colorspace shader.
- (id)init;
{
    self = [self initWithPackedColorspace:NO];
    if (self == nil)
    {
        return nil;
    }
    return self;
}

// Designated setup: selects the fragment shader for the requested colorspace,
// caches the threshold uniform location, and applies the default threshold.
- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;
{
    NSString *fragmentShader = inputUsesPackedColorspace
        ? your_sha256_hashtShaderString
        : kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString;

    self = [super initWithFragmentShaderFromString:fragmentShader];
    if (self == nil)
    {
        return nil;
    }

    thresholdUniform = [filterProgram uniformIndex:@"threshold"];
    self.threshold = 0.9;

    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the new threshold and pushes it to the shader uniform.
- (void)setThreshold:(CGFloat)newValue;
{
    _threshold = newValue;
    [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 3,306 |
```objective-c
// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
// A description of this can be found at his page on the topic:
// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
#import "GLProgram.h"
// START:typedefs
#pragma mark Function Pointer Definitions
// Function-pointer aliases matching the shared signature of the program- and
// shader-introspection calls (glGetProgramiv/glGetShaderiv and
// glGetProgramInfoLog/glGetShaderInfoLog), so either family can be passed
// around interchangeably.
typedef void (*GLInfoFunction)(GLuint program, GLenum pname, GLint* params);
typedef void (*GLLogFunction) (GLuint program, GLsizei bufsize, GLsizei* length, GLchar* infolog);
// END:typedefs
#pragma mark -
#pragma mark Private Extension Method Declaration
// START:extension
@interface GLProgram()
// Compiles one shader of the given type from source. Returns NO on failure;
// the GL compile log is captured into the matching shader-log property.
- (BOOL)compileShader:(GLuint *)shader
type:(GLenum)type
string:(NSString *)shaderString;
@end
// END:extension
#pragma mark -
@implementation GLProgram
// START:init
@synthesize initialized = _initialized;

/// Designated initializer: creates a GL program object and compiles both
/// supplied shader sources. Compile failures are logged; per-shader compile
/// logs are captured into vertexShaderLog / fragmentShaderLog. Note that the
/// shaders are attached even if compilation failed, matching the original
/// behavior (the failure surfaces later at -link time).
- (id)initWithVertexShaderString:(NSString *)vShaderString
            fragmentShaderString:(NSString *)fShaderString;
{
    if ((self = [super init]))
    {
        _initialized = NO;

        attributes = [[NSMutableArray alloc] init];
        uniforms = [[NSMutableArray alloc] init];
        program = glCreateProgram();

        if (![self compileShader:&vertShader
                            type:GL_VERTEX_SHADER
                          string:vShaderString])
        {
            NSLog(@"Failed to compile vertex shader");
        }

        // Create and compile fragment shader
        if (![self compileShader:&fragShader
                            type:GL_FRAGMENT_SHADER
                          string:fShaderString])
        {
            NSLog(@"Failed to compile fragment shader");
        }

        glAttachShader(program, vertShader);
        glAttachShader(program, fragShader);
    }

    return self;
}

/// Convenience initializer: loads the fragment shader from a bundled .fsh file.
- (id)initWithVertexShaderString:(NSString *)vShaderString
          fragmentShaderFilename:(NSString *)fShaderFilename;
{
    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];

    if ((self = [self initWithVertexShaderString:vShaderString fragmentShaderString:fragmentShaderString]))
    {
    }

    return self;
}

/// Convenience initializer: loads both shaders from bundled .vsh / .fsh files.
- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
            fragmentShaderFilename:(NSString *)fShaderFilename;
{
    NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:vShaderFilename ofType:@"vsh"];
    NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil];

    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];

    if ((self = [self initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString]))
    {
    }

    return self;
}
// END:init
// START:compile

/// Compiles a single shader of the given type. Returns NO when the source is
/// missing or GL reports a compile error; any GL compile log is stored in the
/// matching shader-log property for the caller to inspect.
- (BOOL)compileShader:(GLuint *)shader
                 type:(GLenum)type
               string:(NSString *)shaderString
{
//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    GLint status;
    const GLchar *source;

    source =
      (GLchar *)[shaderString UTF8String];
    if (!source)
    {
        // Bug fix: this message previously said "vertex shader" even when the
        // missing source was the fragment shader.
        NSLog(@"Failed to load %@ shader source", (type == GL_VERTEX_SHADER) ? @"vertex" : @"fragment");
        return NO;
    }

    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &source, NULL);
    glCompileShader(*shader);

    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);

    if (status != GL_TRUE)
    {
        GLint logLength;
        glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
        if (logLength > 0)
        {
            GLchar *log = (GLchar *)malloc(logLength);
            glGetShaderInfoLog(*shader, logLength, &logLength, log);
            // Route the log to the right property based on which ivar was
            // passed in, so callers can distinguish vertex/fragment failures.
            if (shader == &vertShader)
            {
                self.vertexShaderLog = [NSString stringWithFormat:@"%s", log];
            }
            else
            {
                self.fragmentShaderLog = [NSString stringWithFormat:@"%s", log];
            }

            free(log);
        }
    }

//    CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime);
//    NSLog(@"Compiled in %f ms", linkTime * 1000.0);

    return status == GL_TRUE;
}
// END:compile
#pragma mark -
// START:addattribute

/// Registers a vertex attribute name, binding it to the next free index.
/// Must be called before -link for the binding to take effect.
- (void)addAttribute:(NSString *)attributeName
{
    if (![attributes containsObject:attributeName])
    {
        [attributes addObject:attributeName];
        glBindAttribLocation(program,
                             (GLuint)[attributes indexOfObject:attributeName],
                             [attributeName UTF8String]);
    }
}
// END:addattribute
// START:indexmethods

/// Returns the attribute index previously assigned by -addAttribute:.
- (GLuint)attributeIndex:(NSString *)attributeName
{
    return (GLuint)[attributes indexOfObject:attributeName];
}

/// Returns the uniform's location in the linked program.
/// NOTE(review): glGetUniformLocation returns -1 (a GLint) for unknown or
/// inactive uniforms; this preserves the original behavior of returning that
/// value cast to GLuint.
- (GLuint)uniformIndex:(NSString *)uniformName
{
    return glGetUniformLocation(program, [uniformName UTF8String]);
}
// END:indexmethods
#pragma mark -
// START:link

/// Links the program. On success the compiled shader objects are deleted
/// (no longer needed once linked) and `initialized` is set to YES.
- (BOOL)link
{
//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    GLint status;

    glLinkProgram(program);

    glGetProgramiv(program, GL_LINK_STATUS, &status);
    if (status == GL_FALSE)
        return NO;

    if (vertShader)
    {
        glDeleteShader(vertShader);
        vertShader = 0;
    }
    if (fragShader)
    {
        glDeleteShader(fragShader);
        fragShader = 0;
    }

    self.initialized = YES;

//    CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime);
//    NSLog(@"Linked in %f ms", linkTime * 1000.0);

    return YES;
}
// END:link
// START:use

/// Makes this program current for subsequent GL draw calls.
- (void)use
{
    glUseProgram(program);
}
// END:use
#pragma mark -

/// Validates the program against current GL state and captures the
/// validation log into programLog (debugging aid).
- (void)validate;
{
    GLint logLength;

    glValidateProgram(program);
    glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0)
    {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(program, logLength, &logLength, log);
        self.programLog = [NSString stringWithFormat:@"%s", log];
        free(log);
    }
}
#pragma mark -
// START:dealloc

/// Releases any remaining GL shader/program objects.
- (void)dealloc
{
    if (vertShader)
        glDeleteShader(vertShader);

    if (fragShader)
        glDeleteShader(fragShader);

    if (program)
        glDeleteProgram(program);
}
// END:dealloc
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GLProgram.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,446 |
```objective-c
#import "GPUImageFilter.h"
// Pixellation filter. NOTE(review): behavior inferred from the class name and
// the property below; the shader itself lives in the implementation file.
@interface GPUImagePixellateFilter : GPUImageFilter
{
// Uniform handles for the pixel size and the image aspect ratio.
GLint fractionalWidthOfAPixelUniform, aspectRatioUniform;
}
// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 82 |
```objective-c
#import "GPUImageFalseColorFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant. Maps each pixel's luminance (Rec. 709-style weights) onto
// a linear ramp between firstColor (luminance 0) and secondColor (luminance 1),
// preserving the source alpha.
// NOTE(review): the "intensity" uniform is declared but never used in main().
NSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING
(
precision lowp float;
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float intensity;
uniform vec3 firstColor;
uniform vec3 secondColor;
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, luminanceWeighting);
gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a);
}
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float intensity;
uniform vec3 firstColor;
uniform vec3 secondColor;
const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, luminanceWeighting);
gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a);
}
);
#endif
@implementation GPUImageFalseColorFilter

@synthesize firstColor = _firstColor;
@synthesize secondColor = _secondColor;

/// Sets up the false-color shader and installs the default ramp:
/// dark blue at zero luminance, red at full luminance.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUFalseColorFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    firstColorUniform = [filterProgram uniformIndex:@"firstColor"];
    secondColorUniform = [filterProgram uniformIndex:@"secondColor"];

    self.firstColor = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f};
    self.secondColor = (GPUVector4){1.0f, 0.0f, 0.0f, 1.0f};

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the low-luminance end of the ramp and forwards its RGB components
/// to the shader (the alpha component is not uploaded).
- (void)setFirstColor:(GPUVector4)newValue;
{
    _firstColor = newValue;

    [self setFirstColorRed:_firstColor.one green:_firstColor.two blue:_firstColor.three];
}

/// Stores the high-luminance end of the ramp and forwards its RGB components
/// to the shader (the alpha component is not uploaded).
- (void)setSecondColor:(GPUVector4)newValue;
{
    _secondColor = newValue;

    [self setSecondColorRed:_secondColor.one green:_secondColor.two blue:_secondColor.three];
}

/// Uploads an RGB triple to the firstColor uniform.
- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
{
    GPUVector3 colorTriple = {redComponent, greenComponent, blueComponent};

    [self setVec3:colorTriple forUniform:firstColorUniform program:filterProgram];
}

/// Uploads an RGB triple to the secondColor uniform.
- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
{
    GPUVector3 colorTriple = {redComponent, greenComponent, blueComponent};

    [self setVec3:colorTriple forUniform:secondColorUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 700 |
```objective-c
#import "GLProgram.h"
// Base classes
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
#import "GPUImageView.h"
#import "GPUImageVideoCamera.h"
#import "GPUImageStillCamera.h"
#import "GPUImageMovie.h"
#import "GPUImagePicture.h"
#import "GPUImageRawDataInput.h"
#import "GPUImageRawDataOutput.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilterPipeline.h"
#import "GPUImageTextureOutput.h"
#import "GPUImageFilterGroup.h"
#import "GPUImageTextureInput.h"
#import "GPUImageUIElement.h"
#import "GPUImageBuffer.h"
#import "GPUImageFramebuffer.h"
#import "GPUImageFramebufferCache.h"
// Filters
#import "GPUImageFilter.h"
#import "GPUImageTwoInputFilter.h"
#import "GPUImagePixellateFilter.h"
#import "GPUImagePixellatePositionFilter.h"
#import "GPUImageSepiaFilter.h"
#import "GPUImageColorInvertFilter.h"
#import "GPUImageSaturationFilter.h"
#import "GPUImageContrastFilter.h"
#import "GPUImageExposureFilter.h"
#import "GPUImageBrightnessFilter.h"
#import "GPUImageLevelsFilter.h"
#import "GPUImageSharpenFilter.h"
#import "GPUImageGammaFilter.h"
#import "GPUImageSobelEdgeDetectionFilter.h"
#import "GPUImageSketchFilter.h"
#import "GPUImageToonFilter.h"
#import "GPUImageSmoothToonFilter.h"
#import "GPUImageMultiplyBlendFilter.h"
#import "GPUImageDissolveBlendFilter.h"
#import "GPUImageKuwaharaFilter.h"
#import "GPUImageKuwaharaRadius3Filter.h"
#import "GPUImageVignetteFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageGaussianBlurPositionFilter.h"
#import "GPUImageGaussianSelectiveBlurFilter.h"
#import "GPUImageOverlayBlendFilter.h"
#import "GPUImageDarkenBlendFilter.h"
#import "GPUImageLightenBlendFilter.h"
#import "GPUImageSwirlFilter.h"
#import "GPUImageSourceOverBlendFilter.h"
#import "GPUImageColorBurnBlendFilter.h"
#import "GPUImageColorDodgeBlendFilter.h"
#import "GPUImageScreenBlendFilter.h"
#import "GPUImageExclusionBlendFilter.h"
#import "GPUImageDifferenceBlendFilter.h"
#import "GPUImageSubtractBlendFilter.h"
#import "GPUImageHardLightBlendFilter.h"
#import "GPUImageSoftLightBlendFilter.h"
#import "GPUImageColorBlendFilter.h"
#import "GPUImageHueBlendFilter.h"
#import "GPUImageSaturationBlendFilter.h"
#import "GPUImageLuminosityBlendFilter.h"
#import "GPUImageCropFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImageTransformFilter.h"
#import "GPUImageChromaKeyBlendFilter.h"
#import "GPUImageHazeFilter.h"
#import "GPUImageLuminanceThresholdFilter.h"
#import "GPUImagePosterizeFilter.h"
#import "GPUImageBoxBlurFilter.h"
#import "GPUImageAdaptiveThresholdFilter.h"
#import "GPUImageUnsharpMaskFilter.h"
#import "GPUImageBulgeDistortionFilter.h"
#import "GPUImagePinchDistortionFilter.h"
#import "GPUImageCrosshatchFilter.h"
#import "GPUImageCGAColorspaceFilter.h"
#import "GPUImagePolarPixellateFilter.h"
#import "GPUImageStretchDistortionFilter.h"
#import "GPUImagePerlinNoiseFilter.h"
#import "GPUImageJFAVoronoiFilter.h"
#import "GPUImageVoronoiConsumerFilter.h"
#import "GPUImageMosaicFilter.h"
#import "GPUImageTiltShiftFilter.h"
#import "GPUImage3x3ConvolutionFilter.h"
#import "GPUImageEmbossFilter.h"
#import "GPUImageCannyEdgeDetectionFilter.h"
#import "GPUImageThresholdEdgeDetectionFilter.h"
#import "GPUImageMaskFilter.h"
#import "GPUImageHistogramFilter.h"
#import "GPUImageHistogramGenerator.h"
#import "GPUImageHistogramEqualizationFilter.h"
#import "GPUImagePrewittEdgeDetectionFilter.h"
#import "GPUImageXYDerivativeFilter.h"
#import "GPUImageHarrisCornerDetectionFilter.h"
#import "GPUImageAlphaBlendFilter.h"
#import "GPUImageNormalBlendFilter.h"
#import "GPUImageNonMaximumSuppressionFilter.h"
#import "GPUImageRGBFilter.h"
#import "GPUImageMedianFilter.h"
#import "GPUImageBilateralFilter.h"
#import "GPUImageCrosshairGenerator.h"
#import "GPUImageToneCurveFilter.h"
#import "GPUImageNobleCornerDetectionFilter.h"
#import "GPUImageShiTomasiFeatureDetectionFilter.h"
#import "GPUImageErosionFilter.h"
#import "GPUImageRGBErosionFilter.h"
#import "GPUImageDilationFilter.h"
#import "GPUImageRGBDilationFilter.h"
#import "GPUImageOpeningFilter.h"
#import "GPUImageRGBOpeningFilter.h"
#import "GPUImageClosingFilter.h"
#import "GPUImageRGBClosingFilter.h"
#import "GPUImageColorPackingFilter.h"
#import "GPUImageSphereRefractionFilter.h"
#import "GPUImageMonochromeFilter.h"
#import "GPUImageOpacityFilter.h"
#import "GPUImageHighlightShadowFilter.h"
#import "GPUImageFalseColorFilter.h"
#import "GPUImageHSBFilter.h"
#import "GPUImageHueFilter.h"
#import "GPUImageGlassSphereFilter.h"
#import "GPUImageLookupFilter.h"
#import "GPUImageAmatorkaFilter.h"
#import "GPUImageMissEtikateFilter.h"
#import "GPUImageSoftEleganceFilter.h"
#import "GPUImageAddBlendFilter.h"
#import "GPUImageDivideBlendFilter.h"
#import "GPUImagePolkaDotFilter.h"
#import "GPUImageLocalBinaryPatternFilter.h"
#import "GPUImageLanczosResamplingFilter.h"
#import "GPUImageAverageColor.h"
#import "GPUImageSolidColorGenerator.h"
#import "GPUImageLuminosity.h"
#import "GPUImageAverageLuminanceThresholdFilter.h"
#import "GPUImageWhiteBalanceFilter.h"
#import "GPUImageChromaKeyFilter.h"
#import "GPUImageLowPassFilter.h"
#import "GPUImageHighPassFilter.h"
#import "GPUImageMotionDetector.h"
#import "GPUImageHalftoneFilter.h"
#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
#import "GPUImageHoughTransformLineDetector.h"
#import "GPUImageParallelCoordinateLineTransformFilter.h"
#import "GPUImageThresholdSketchFilter.h"
#import "GPUImageLineGenerator.h"
#import "GPUImageLinearBurnBlendFilter.h"
#import "GPUImageGaussianBlurPositionFilter.h"
#import "GPUImagePixellatePositionFilter.h"
#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
#import "GPUImagePoissonBlendFilter.h"
#import "GPUImageMotionBlurFilter.h"
#import "GPUImageZoomBlurFilter.h"
#import "GPUImageLaplacianFilter.h"
#import "GPUImageiOSBlurFilter.h"
#import "GPUImageLuminanceRangeFilter.h"
#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
#import "GPUImageSingleComponentGaussianBlurFilter.h"
#import "GPUImageThreeInputFilter.h"
#import "GPUImageWeakPixelInclusionFilter.h"
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImage.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,533 |
```objective-c
#import "GPUImageColorDodgeBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant of the color-dodge blend. Per channel, step() selects
// between two alpha-composited blend formulas; the overlay RGB used in the
// dodge term is clamped to 0.99 so the division by (1.0 - overlayRGB) can
// never divide by zero.
NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING
(
precision mediump float;
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 base = texture2D(inputImageTexture, textureCoordinate);
vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);
vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);
vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;
vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);
vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;
vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);
gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);
}
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 base = texture2D(inputImageTexture, textureCoordinate);
vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);
vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);
vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;
vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);
vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;
vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);
gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);
}
);
#endif
@implementation GPUImageColorDodgeBlendFilter

/// Initializes the two-input blend filter with the color-dodge fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageColorDodgeBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 656 |
```objective-c
#import "GPUImageWeakPixelInclusionFilter.h"
@implementation GPUImageWeakPixelInclusionFilter

// Fragment shader (OpenGL ES build). Outputs white when the summed red-channel
// intensity of the 3x3 neighborhood reaches 1.5 AND the center pixel itself is
// at least 0.01; otherwise outputs black. The varyings are supplied by the
// superclass's 3x3-sampling vertex shader.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING
(
precision lowp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;
float sumTest = step(1.5, pixelIntensitySum);
float pixelTest = step(0.01, centerIntensity);
gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0);
}
);
#else
// Desktop OpenGL build: identical logic without precision qualifiers.
NSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;
float sumTest = step(1.5, pixelIntensitySum);
float pixelTest = step(0.01, centerIntensity);
gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
// Initializes with the weak-pixel-inclusion fragment shader above.
- (id)init;
{
if (!(self = [self initWithFragmentShaderFromString:kGPUImageWeakPixelInclusionFragmentShaderString]))
{
return nil;
}
return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 810 |
```objective-c
#import "GPUImageTwoPassFilter.h"
@implementation GPUImageTwoPassFilter
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer. The first-pass program is built by the superclass;
// the second-pass program is compiled, linked, and configured synchronously on
// the video-processing queue. A link failure logs all shader/program logs and
// trips an NSAssert (debug builds).
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
{
if (!(self = [super initWithVertexShaderFromString:firstStageVertexShaderString fragmentShaderFromString:firstStageFragmentShaderString]))
{
return nil;
}
secondProgramUniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:secondStageVertexShaderString fragmentShaderString:secondStageFragmentShaderString];
// Only attribute-bind and link if the context handed us a fresh (unlinked)
// program; a cached program is already linked.
if (!secondFilterProgram.initialized)
{
[self initializeSecondaryAttributes];
if (![secondFilterProgram link])
{
NSString *progLog = [secondFilterProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [secondFilterProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [secondFilterProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
secondFilterProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
[GPUImageContext setActiveShaderProgram:secondFilterProgram];
glEnableVertexAttribArray(secondFilterPositionAttribute);
glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
});
return self;
}
// Convenience initializer: both passes use the default vertex shader.
- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
{
if (!(self = [self initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:kGPUImageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
{
return nil;
}
return self;
}
// Binds the standard attribute names on the second-pass program before it is
// linked; subclasses can override to add attributes of their own.
- (void)initializeSecondaryAttributes;
{
[secondFilterProgram addAttribute:@"position"];
[secondFilterProgram addAttribute:@"inputTextureCoordinate"];
}
#pragma mark -
#pragma mark Managing targets
// Downstream targets read the SECOND pass's framebuffer, not the intermediate.
- (GPUImageFramebuffer *)framebufferForOutput;
{
return secondOutputFramebuffer;
}
- (void)removeOutputFramebuffer;
{
secondOutputFramebuffer = nil;
}
#pragma mark -
#pragma mark Rendering
// Runs both passes back to back: pass 1 draws the input into outputFramebuffer
// (the intermediate), pass 2 draws that intermediate into
// secondOutputFramebuffer. Both framebuffers come from the shared cache; the
// input and the intermediate are unlocked (returned to the cache) when their
// respective passes finish with them.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
if (self.preventRendering)
{
[firstInputFramebuffer unlock];
return;
}
[GPUImageContext setActiveShaderProgram:filterProgram];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
[self setUniformsForProgramAtIndex:0];
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
glClear(GL_COLOR_BUFFER_BIT);
// Pass 1 samples the input from texture unit 2.
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform, 2);
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[firstInputFramebuffer unlock];
firstInputFramebuffer = nil;
// This assumes that any two-pass filter that says it desires monochrome input is using the first pass for a luminance conversion, which can be dropped
//    if (!currentlyReceivingMonochromeInput)
//    {
// Run the first stage of the two-pass filter
//        [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
//    }
// Run the second stage of the two-pass filter
secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[secondOutputFramebuffer activateFramebuffer];
[GPUImageContext setActiveShaderProgram:secondFilterProgram];
// Extra lock so the framebuffer survives until the captured image is read.
if (usingNextFrameForImageCapture)
{
[secondOutputFramebuffer lock];
}
[self setUniformsForProgramAtIndex:1];
// Pass 2 samples the intermediate from texture unit 3, with no rotation.
glActiveTexture(GL_TEXTURE3);
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
// TODO: Re-enable this monochrome optimization
//    if (!currentlyReceivingMonochromeInput)
//    {
//        glActiveTexture(GL_TEXTURE3);
//        glBindTexture(GL_TEXTURE_2D, outputTexture);
//        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
//    }
//    else
//    {
//        glActiveTexture(GL_TEXTURE3);
//        glBindTexture(GL_TEXTURE_2D, sourceTexture);
//        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
//    }
glUniform1i(secondFilterInputTextureUniform, 3);
glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[outputFramebuffer unlock];
outputFramebuffer = nil;
if (usingNextFrameForImageCapture)
{
dispatch_semaphore_signal(imageCaptureSemaphore);
}
}
// Records (and immediately runs) a uniform-restoration block, keyed by uniform
// location, in the dictionary belonging to whichever program the uniform is on.
- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
{
// TODO: Deal with the fact that two-pass filters may have the same shader program identifier
if (shaderProgram == filterProgram)
{
[uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
}
else
{
[secondProgramUniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
}
uniformStateBlock();
}
// Replays the saved uniform-restoration blocks for pass 0 (first program) or
// pass 1 (second program) before that pass draws.
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
if (programIndex == 0)
{
[uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
dispatch_block_t currentBlock = obj;
currentBlock();
}];
}
else
{
[secondProgramUniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
dispatch_block_t currentBlock = obj;
currentBlock();
}];
}
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,629 |
```objective-c
#import "GPUImageFilter.h"
// Non-maximum suppression filter. NOTE(review): "directional" presumably means
// suppression along a per-pixel gradient direction from an upstream stage —
// confirm against the implementation file.
@interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter
{
// Uniform handles for the sampling step size and the two hysteresis thresholds.
GLint texelWidthUniform, texelHeightUniform;
GLint upperThresholdUniform, lowerThresholdUniform;
// Tracks whether the caller explicitly set texel sizes (presumably so
// automatic sizing from the input image is skipped — verify in the .m).
BOOL hasOverriddenImageSizeFactor;
}
// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;
// These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold)
@property(readwrite, nonatomic) CGFloat upperThreshold;
@property(readwrite, nonatomic) CGFloat lowerThreshold;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 170 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageGaussianBlurFilter;
/** A Gaussian blur that preserves focus within a circular region
*/
@interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup
{
// Internal pipeline stages: the full-image blur and the stage that masks the
// sharp circular region back in.
GPUImageGaussianBlurFilter *blurFilter;
GPUImageFilter *selectiveFocusFilter;
// Set when the caller assigns aspectRatio explicitly, overriding the
// image-derived default.
BOOL hasOverriddenAspectRatio;
}
/** The radius of the circular area being excluded from the blur
*/
@property (readwrite, nonatomic) CGFloat excludeCircleRadius;
/** The center of the circular area being excluded from the blur
*/
@property (readwrite, nonatomic) CGPoint excludeCirclePoint;
/** The size of the area between the blurred portion and the clear circle
*/
@property (readwrite, nonatomic) CGFloat excludeBlurSize;
/** A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.
*/
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
/** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.
*/
@property (readwrite, nonatomic) CGFloat aspectRatio;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 250 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageGaussianBlurFilter;
/// A simulated tilt shift lens effect: a horizontal in-focus band with blur above and below it.
@interface GPUImageTiltShiftFilter : GPUImageFilterGroup
{
GPUImageGaussianBlurFilter *blurFilter; // produces the fully blurred version of the image
GPUImageFilter *tiltShiftFilter;        // blends sharp and blurred images by vertical position
}

/// The radius of the underlying blur, in pixels. This is 7.0 by default.
@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;

/// The normalized location of the top of the in-focus area in the image, this value should be lower than bottomFocusLevel, default 0.4
@property(readwrite, nonatomic) CGFloat topFocusLevel;

/// The normalized location of the bottom of the in-focus area in the image, this value should be higher than topFocusLevel, default 0.6
@property(readwrite, nonatomic) CGFloat bottomFocusLevel;

/// The rate at which the image gets blurry away from the in-focus region, default 0.2
@property(readwrite, nonatomic) CGFloat focusFallOffRate;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 217 |
```objective-c
#import "GPUImageColorMatrixFilter.h"
/// Simple sepia tone filter, implemented as a fixed color matrix applied by the superclass.
@interface GPUImageSepiaFilter : GPUImageColorMatrixFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 32 |
```objective-c
#import "GPUImageRGBDilationFilter.h"
#import "GPUImageDilationFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// iOS / OpenGL ES variants (precision qualifiers are required on GLES).
// Each fragment shader performs one axis of a separable RGB dilation
// (morphological maximum): it takes the per-channel max over 2*radius + 1
// samples whose coordinates are produced by the shared
// kGPUImageDilationRadius*VertexShaderString vertex shaders declared by
// GPUImageDilationFilter.
NSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
gl_FragColor = max(maxValue, oneStepNegativeIntensity);
}
);
// NOTE(review): in the radius-two shader below, twoStepsNegativeIntensity is
// folded into maxValue and then max'ed again on the gl_FragColor line.
// Redundant but harmless — max is idempotent, and all five samples are covered.
NSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
gl_FragColor = max(maxValue, twoStepsNegativeIntensity);
}
);
NSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
maxValue = max(maxValue, threeStepsPositiveIntensity);
gl_FragColor = max(maxValue, threeStepsNegativeIntensity);
}
);
NSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
varying vec2 fourStepsPositiveTextureCoordinate;
varying vec2 fourStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
maxValue = max(maxValue, threeStepsPositiveIntensity);
maxValue = max(maxValue, threeStepsNegativeIntensity);
maxValue = max(maxValue, fourStepsPositiveIntensity);
gl_FragColor = max(maxValue, fourStepsNegativeIntensity);
}
);
#else
// Desktop OpenGL variants: identical logic, but no precision qualifiers
// (not part of desktop GLSL).
NSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
gl_FragColor = max(maxValue, oneStepNegativeIntensity);
}
);
// NOTE(review): same harmless duplicated max of twoStepsNegativeIntensity as
// in the GLES radius-two shader above.
NSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
gl_FragColor = max(maxValue, twoStepsNegativeIntensity);
}
);
NSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
maxValue = max(maxValue, threeStepsPositiveIntensity);
gl_FragColor = max(maxValue, threeStepsNegativeIntensity);
}
);
NSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
varying vec2 fourStepsPositiveTextureCoordinate;
varying vec2 fourStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
maxValue = max(maxValue, threeStepsPositiveIntensity);
maxValue = max(maxValue, threeStepsNegativeIntensity);
maxValue = max(maxValue, fourStepsPositiveIntensity);
gl_FragColor = max(maxValue, fourStepsNegativeIntensity);
}
);
#endif
@implementation GPUImageRGBDilationFilter

#pragma mark -
#pragma mark Initialization and teardown

// Designated initializer: selects the shader pair for the requested radius.
// Radii 0 and 1 both use the radius-one shaders; any radius above 4 falls
// back to the radius-four shaders.
- (id)initWithRadius:(NSUInteger)dilationRadius;
{
    NSString *vertexShader;
    NSString *fragmentShader;

    switch (dilationRadius)
    {
        case 0:
        case 1:
            vertexShader = kGPUImageDilationRadiusOneVertexShaderString;
            fragmentShader = kGPUImageRGBDilationRadiusOneFragmentShaderString;
            break;
        case 2:
            vertexShader = kGPUImageDilationRadiusTwoVertexShaderString;
            fragmentShader = kGPUImageRGBDilationRadiusTwoFragmentShaderString;
            break;
        case 3:
            vertexShader = kGPUImageDilationRadiusThreeVertexShaderString;
            fragmentShader = kGPUImageRGBDilationRadiusThreeFragmentShaderString;
            break;
        case 4:
        default:
            vertexShader = kGPUImageDilationRadiusFourVertexShaderString;
            fragmentShader = kGPUImageRGBDilationRadiusFourFragmentShaderString;
            break;
    }

    // Both separable passes (one per axis) reuse the same shader pair.
    self = [super initWithFirstStageVertexShaderFromString:vertexShader
                        firstStageFragmentShaderFromString:fragmentShader
                         secondStageVertexShaderFromString:vertexShader
                       secondStageFragmentShaderFromString:fragmentShader];
    return self;
}

// Default radius is 1.
- (id)init;
{
    return [self initWithRadius:1];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,765 |
```objective-c
#import "GPUImageFilter.h"
/*
 * The haze filter can be used to add or remove haze (similar to a UV filter)
 *
 * @author Alaric Cole
 * @creationDate 03/10/12
 *
 */

/** The haze filter can be used to add or remove haze

    This is similar to a UV filter
 */
@interface GPUImageHazeFilter : GPUImageFilter
{
GLint distanceUniform; // shader uniform handle backing the distance property
GLint slopeUniform;    // shader uniform handle backing the slope property
}
/** Strength of the color applied. Default 0. Values between -.3 and .3 are best
 */
@property(readwrite, nonatomic) CGFloat distance;

/** Amount of color change. Default 0. Values between -.3 and .3 are best
 */
@property(readwrite, nonatomic) CGFloat slope;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHazeFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 160 |
```objective-c
#import "GPUImagePosterizeFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// GLES variant. Quantizes each channel to colorLevels discrete steps:
// scale up, round to the nearest step (the +0.5 before floor), scale back.
NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform highp float colorLevels;

 void main()
 {
     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

     gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
 }
);
#else
// Desktop OpenGL variant: identical logic, no precision qualifiers.
NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform float colorLevels;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

     gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
 }
);
#endif
@implementation GPUImagePosterizeFilter

@synthesize colorLevels = _colorLevels;

#pragma mark -
#pragma mark Initialization

// Compiles the posterize shader and defaults to 10 quantization levels per channel.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImagePosterizeFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    colorLevelsUniform = [filterProgram uniformIndex:@"colorLevels"];
    self.colorLevels = 10;

    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the level count and pushes it to the shader as a float uniform.
- (void)setColorLevels:(NSUInteger)levels;
{
    _colorLevels = levels;
    [self setFloat:_colorLevels forUniform:colorLevelsUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 342 |
```objective-c
#import "GPUImageHistogramGenerator.h"
// Vertex shader: samples the histogram row (y fixed at 0.5) at the fragment's
// x position, and converts the fragment's y coordinate into a "height" so the
// fragment shader can decide whether a bar covers this pixel.
NSString *const kGPUImageHistogramGeneratorVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 varying vec2 textureCoordinate;
 varying float height;

 void main()
 {
     gl_Position = position;
     textureCoordinate = vec2(inputTextureCoordinate.x, 0.5);
     height = 1.0 - inputTextureCoordinate.y;
 }
);

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// GLES variant. step(height, channel) is 1.0 for each channel whose histogram
// value reaches this height; pixels not covered by any channel's bar show the
// configurable backgroundColor.
NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp float height;

 uniform sampler2D inputImageTexture;
 uniform lowp vec4 backgroundColor;

 void main()
 {
     lowp vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;
     lowp vec4 heightTest = vec4(step(height, colorChannels), 1.0);
     gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);
 }
);
#else
// Desktop OpenGL variant: identical logic, no precision qualifiers.
NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying float height;

 uniform sampler2D inputImageTexture;
 uniform vec4 backgroundColor;

 void main()
 {
     vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;
     vec4 heightTest = vec4(step(height, colorChannels), 1.0);
     gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);
 }
);
#endif
@implementation GPUImageHistogramGenerator

#pragma mark -
#pragma mark Initialization and teardown

// Sets up the bar-drawing shader pair and starts with a fully transparent
// black background.
- (id)init;
{
    self = [super initWithVertexShaderFromString:kGPUImageHistogramGeneratorVertexShaderString
                        fragmentShaderFromString:kGPUImageHistogramGeneratorFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    backgroundColorUniform = [filterProgram uniformIndex:@"backgroundColor"];
    [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0];

    return self;
}

#pragma mark -
#pragma mark Accessors

// Uploads the RGBA background color drawn wherever no histogram bar covers a pixel.
- (void)setBackgroundColorRed:(GLfloat)red green:(GLfloat)green blue:(GLfloat)blue alpha:(GLfloat)alpha;
{
    GPUVector4 color = {red, green, blue, alpha};
    [self setVec4:color forUniform:backgroundColorUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 569 |
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
@interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
{
GLint thresholdUniform; // shader uniform handle backing the threshold property
}

/** Any local maximum above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.8 as the default
 */
@property(readwrite, nonatomic) CGFloat threshold;

// NOTE(review): presumably selects a shader variant that unpacks values stored
// in a packed colorspace (cf. GPUImageColorPackingFilter) — confirm in the .m.
- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 108 |
```objective-c
#import <Foundation/Foundation.h>
#import "GPUImageOutput.h"
@interface GPUImageFilterPipeline : NSObject
{
NSString *stringValue;
}

/// The ordered chain of filters between input and output.
@property (strong) NSMutableArray *filters;

/// The source feeding the first filter in the chain.
@property (strong) GPUImageOutput *input;
/// The sink attached after the last filter in the chain.
@property (strong) id <GPUImageInput> output;

/// Builds a pipeline from an explicit, ordered array of filters.
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
/// Builds a pipeline from a configuration dictionary describing the filters.
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
/// Builds a pipeline from a configuration file at the given URL.
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;

// Chain-mutation methods; each rewires the pipeline around the change.
- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) replaceAllFilters:(NSArray *) newFilters;
- (void) removeFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;

// Capture of the most recent fully-filtered frame.
- (UIImage *) currentFilteredFrame;
- (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;
- (CGImageRef) newCGImageFromCurrentFilteredFrame;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 297 |
```objective-c
#import "GPUImageHistogramFilter.h"
// Unlike other filters, this one uses a grid of GL_POINTs to sample the incoming image in a grid. A custom vertex shader reads the color in the texture at its position
// and outputs a bin position in the final histogram as the vertex position. That point is then written into the image of the histogram using translucent pixels.
// The degree of translucency is controlled by the scalingFactor, which lets you adjust the dynamic range of the histogram. The histogram can only be generated for one
// color channel or luminance value at a time.
//
// This is based on this implementation: path_to_url
//
// Or at least that's how it would work if iOS could read from textures in a vertex shader, which it can't. Therefore, I read the texture data down from the
// incoming frame and process the texture colors as vertices.
// Sampling vertex shaders: the "position" attribute actually carries one
// pixel's RGBA bytes (see glVertexAttribPointer with GL_UNSIGNED_BYTE in
// -renderToTextureWithVertices:). 0.0078125 is 2/256, mapping a byte value
// 0..255 onto clip-space x in [-1, 1); each point lands on its histogram bin.
NSString *const kGPUImageRedHistogramSamplingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;

 varying vec3 colorFactor;

 void main()
 {
     colorFactor = vec3(1.0, 0.0, 0.0);
     gl_Position = vec4(-1.0 + (position.x * 0.0078125), 0.0, 0.0, 1.0);
     gl_PointSize = 1.0;
 }
);

NSString *const kGPUImageGreenHistogramSamplingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;

 varying vec3 colorFactor;

 void main()
 {
     colorFactor = vec3(0.0, 1.0, 0.0);
     gl_Position = vec4(-1.0 + (position.y * 0.0078125), 0.0, 0.0, 1.0);
     gl_PointSize = 1.0;
 }
);

NSString *const kGPUImageBlueHistogramSamplingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;

 varying vec3 colorFactor;

 void main()
 {
     colorFactor = vec3(0.0, 0.0, 1.0);
     gl_Position = vec4(-1.0 + (position.z * 0.0078125), 0.0, 0.0, 1.0);
     gl_PointSize = 1.0;
 }
);

// Luminance variant: bins the Rec. 709-style weighted sum of the channels.
NSString *const kGPUImageLuminanceHistogramSamplingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;

 varying vec3 colorFactor;

 const vec3 W = vec3(0.2125, 0.7154, 0.0721);

 void main()
 {
     float luminance = dot(position.xyz, W);
     colorFactor = vec3(1.0, 1.0, 1.0);
     gl_Position = vec4(-1.0 + (luminance * 0.0078125), 0.0, 0.0, 1.0);
     gl_PointSize = 1.0;
 }
);

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Accumulation shader (GLES): with additive blending enabled, each point adds
// 1/256 to its bin, so a bin saturates after 256 samples.
NSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING
(
 const lowp float scalingFactor = 1.0 / 256.0;

 varying lowp vec3 colorFactor;

 void main()
 {
     gl_FragColor = vec4(colorFactor * scalingFactor , 1.0);
 }
);
#else
// Desktop OpenGL variant: identical logic, no precision qualifiers.
NSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING
(
 const float scalingFactor = 1.0 / 256.0;

 varying vec3 colorFactor;

 void main()
 {
     gl_FragColor = vec4(colorFactor * scalingFactor , 1.0);
 }
);
#endif
@implementation GPUImageHistogramFilter

@synthesize downsamplingFactor = _downsamplingFactor;

#pragma mark -
#pragma mark Initialization and teardown

// Designated initializer. Selects the sampling vertex shader for the requested
// channel; the RGB variant additionally compiles green and blue sampling
// programs so all three channel histograms accumulate into the same output.
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
{
    switch (newHistogramType)
    {
        case kGPUImageHistogramRed:
        {
            if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
            {
                return nil;
            }
        }; break;
        case kGPUImageHistogramGreen:
        {
            if (!(self = [super initWithVertexShaderFromString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
            {
                return nil;
            }
        }; break;
        case kGPUImageHistogramBlue:
        {
            if (!(self = [super initWithVertexShaderFromString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
            {
                return nil;
            }
        }; break;
        case kGPUImageHistogramLuminance:
        {
            if (!(self = [super initWithVertexShaderFromString:kGPUImageLuminanceHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
            {
                return nil;
            }
        }; break;
        case kGPUImageHistogramRGB:
        {
            // The base program samples red; the two extra programs sample green and blue.
            if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
            {
                return nil;
            }

            runSynchronouslyOnVideoProcessingQueue(^{
                [GPUImageContext useImageProcessingContext];

                secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString];
                thirdFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString];

                if (!secondFilterProgram.initialized)
                {
                    [self initializeSecondaryAttributes];

                    if (![secondFilterProgram link])
                    {
                        NSString *progLog = [secondFilterProgram programLog];
                        NSLog(@"Program link log: %@", progLog);
                        NSString *fragLog = [secondFilterProgram fragmentShaderLog];
                        NSLog(@"Fragment shader compile log: %@", fragLog);
                        NSString *vertLog = [secondFilterProgram vertexShaderLog];
                        NSLog(@"Vertex shader compile log: %@", vertLog);
                        // Fixed: previously nil'ed filterProgram (the already-linked
                        // base program) instead of the program that failed to link.
                        secondFilterProgram = nil;
                        NSAssert(NO, @"Filter shader link failed");
                    }

                    [GPUImageContext setActiveShaderProgram:secondFilterProgram];

                    glEnableVertexAttribArray(secondFilterPositionAttribute);

                    if (![thirdFilterProgram link])
                    {
                        // Fixed copy-paste bug: these diagnostics previously read the
                        // logs of secondFilterProgram rather than the failing program.
                        NSString *progLog = [thirdFilterProgram programLog];
                        NSLog(@"Program link log: %@", progLog);
                        NSString *fragLog = [thirdFilterProgram fragmentShaderLog];
                        NSLog(@"Fragment shader compile log: %@", fragLog);
                        NSString *vertLog = [thirdFilterProgram vertexShaderLog];
                        NSLog(@"Vertex shader compile log: %@", vertLog);
                        thirdFilterProgram = nil;
                        NSAssert(NO, @"Filter shader link failed");
                    }
                }

                // NOTE(review): the attribute index is fetched after the
                // glEnableVertexAttribArray(secondFilterPositionAttribute) call above,
                // which therefore uses the ivar's prior value. Kept as-is to avoid
                // altering the established GL state ordering.
                secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
                thirdFilterPositionAttribute = [thirdFilterProgram attributeIndex:@"position"];
                [GPUImageContext setActiveShaderProgram:thirdFilterProgram];

                glEnableVertexAttribArray(thirdFilterPositionAttribute);
            });
        }; break;
    }

    histogramType = newHistogramType;

    self.downsamplingFactor = 16;

    return self;
}

// Defaults to the combined RGB histogram.
- (id)init;
{
    if (!(self = [self initWithHistogramType:kGPUImageHistogramRGB]))
    {
        return nil;
    }

    return self;
}

// Registers the "position" attribute on the auxiliary programs before linking.
- (void)initializeSecondaryAttributes;
{
    [secondFilterProgram addAttribute:@"position"];
    [thirdFilterProgram addAttribute:@"position"];
}

- (void)dealloc;
{
    // The sampling buffer is only calloc'ed on the slow (glReadPixels) path;
    // with fast texture upload it aliases the input framebuffer's byte buffer
    // and must not be freed here.
    if (vertexSamplingCoordinates != NULL && ![GPUImageContext supportsFastTextureUpload])
    {
        free(vertexSamplingCoordinates);
    }
}

#pragma mark -
#pragma mark Rendering

// The histogram output is a fixed 256x3 image: one column per bin.
- (CGSize)sizeOfFBO;
{
    return CGSizeMake(256.0, 3.0);
}

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    [self renderToTextureWithVertices:NULL textureCoordinates:NULL];

    [self informTargetsAboutNewFrameAtTime:frameTime];
}

- (CGSize)outputFrameSize;
{
    return [self sizeOfFBO];
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (self.preventRendering)
    {
        return;
    }

    inputTextureSize = newSize;
}

// Orientation is irrelevant for a histogram, so rotation is always forced off.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    inputRotation = kGPUImageNoRotation;
}

// Reads the input pixels back to the CPU and re-submits them as GL_POINT
// vertex data; the sampling vertex shader maps each pixel's color to a
// histogram bin position, and additive blending accumulates the counts.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    // we need a normal color texture for this filter
    NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
    NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");

    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext useImageProcessingContext];

    if ([GPUImageContext supportsFastTextureUpload])
    {
        // Map the input framebuffer's bytes directly; glFinish ensures the GPU
        // has finished writing them first.
        glFinish();
        vertexSamplingCoordinates = [firstInputFramebuffer byteBuffer];
    } else {
        if (vertexSamplingCoordinates == NULL)
        {
            vertexSamplingCoordinates = calloc(inputTextureSize.width * inputTextureSize.height * 4, sizeof(GLubyte));
        }
        glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, vertexSamplingCoordinates);
    }

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    if (usingNextFrameForImageCapture)
    {
        [outputFramebuffer lock];
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];

    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    // Additive blending: each point adds scalingFactor (1/256) to its bin.
    glBlendEquation(GL_FUNC_ADD);
    glBlendFunc(GL_ONE, GL_ONE);
    glEnable(GL_BLEND);

    // The raw pixel bytes serve as vertex data; the stride skips pixels
    // between samples according to downsamplingFactor.
    glVertexAttribPointer(filterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);
    glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);

    if (histogramType == kGPUImageHistogramRGB)
    {
        // Re-submit the same vertex data through the green and blue programs.
        [GPUImageContext setActiveShaderProgram:secondFilterProgram];

        glVertexAttribPointer(secondFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);
        glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);

        [GPUImageContext setActiveShaderProgram:thirdFilterProgram];

        glVertexAttribPointer(thirdFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);
        glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);
    }

    glDisable(GL_BLEND);

    [firstInputFramebuffer unlock];

    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

#pragma mark -
#pragma mark Accessors

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,528 |
```objective-c
#import "GPUImageFilter.h"
// NOTE(review): presumably retains a rolling window of recent framebuffers and
// outputs a delayed frame — confirm against GPUImageBuffer.m.
@interface GPUImageBuffer : GPUImageFilter
{
NSMutableArray *bufferedFramebuffers; // retained framebuffers, oldest first (assumed)
}

// Number of frames held before a frame is released. Semantics defined in the .m.
@property(readwrite, nonatomic) NSUInteger bufferSize;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBuffer.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 38 |
```objective-c
#import "GPUImageUIElement.h"
// Private state for GPUImageUIElement.
@interface GPUImageUIElement ()
{
UIView *view;   // nil when initialized with a bare CALayer
CALayer *layer; // the layer rendered into the output framebuffer
CGSize previousLayerSizeInPixels;
CMTime time;    // internal clock advanced by -updateUsingCurrentTime
NSTimeInterval actualTimeOfLastUpdate; // wall-clock time of the last clock update
}
@end
@implementation GPUImageUIElement
#pragma mark -
#pragma mark Initialization and teardown
// Wraps a UIView so its backing layer can be rendered as filter input.
- (id)initWithView:(UIView *)inputView;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    view = inputView;
    layer = inputView.layer;
    previousLayerSizeInPixels = CGSizeZero;

    // Capture an initial frame immediately.
    [self update];

    return self;
}
// Wraps a bare CALayer (no owning view) as filter input.
- (id)initWithLayer:(CALayer *)inputLayer;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    view = nil;
    layer = inputLayer;
    previousLayerSizeInPixels = CGSizeZero;

    // Capture an initial frame immediately.
    [self update];

    return self;
}
#pragma mark -
#pragma mark Layer management
// Converts the layer's point-based bounds into pixel dimensions using the
// layer's content scale factor.
- (CGSize)layerSizeInPixels;
{
    CGSize boundsInPoints = layer.bounds.size;
    CGFloat scale = layer.contentsScale;
    return CGSizeMake(scale * boundsInPoints.width, scale * boundsInPoints.height);
}
// Re-renders the element immediately, tagging the frame with an indefinite
// timestamp (no timing information attached).
- (void)update;
{
    [self updateWithTimestamp:kCMTimeIndefinite];
}
// Re-renders the element using an internally maintained clock that advances
// by the wall-clock interval elapsed since the previous call. The first call
// starts the clock at zero (600-tick timescale, the common video timescale).
- (void)updateUsingCurrentTime;
{
    NSTimeInterval now = [NSDate timeIntervalSinceReferenceDate];

    if (CMTIME_IS_VALID(time))
    {
        // Advance the internal clock by the elapsed wall-clock time.
        NSTimeInterval elapsed = now - actualTimeOfLastUpdate;
        time = CMTimeAdd(time, CMTimeMakeWithSeconds(elapsed, 600));
    }
    else
    {
        // First call: initialize the clock at zero.
        time = CMTimeMakeWithSeconds(0, 600);
    }
    actualTimeOfLastUpdate = now;

    [self updateWithTimestamp:time];
}
// Renders the layer's current contents into a CPU-side BGRA bitmap via Core
// Graphics, uploads that bitmap into a framebuffer texture from the shared
// cache, and notifies all targets with the given timestamp.
- (void)updateWithTimestamp:(CMTime)frameTime;
{
    [GPUImageContext useImageProcessingContext];
    CGSize layerPixelSize = [self layerSizeInPixels];
    // Zero-filled BGRA backing store, 4 bytes per pixel.
    GLubyte *imageData = (GLubyte *) calloc(1, (int)layerPixelSize.width * (int)layerPixelSize.height * 4);
    CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
    CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)layerPixelSize.width, (int)layerPixelSize.height, 8, (int)layerPixelSize.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    //        CGContextRotateCTM(imageContext, M_PI_2);
    // Flip vertically and apply the layer's content scale so the CG (top-left
    // origin) render lands right-side-up in the GL texture.
    CGContextTranslateCTM(imageContext, 0.0f, layerPixelSize.height);
    CGContextScaleCTM(imageContext, layer.contentsScale, -layer.contentsScale);
    //        CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: path_to_url#qa/qa1708/_index.html
    [layer renderInContext:imageContext];
    CGContextRelease(imageContext);
    CGColorSpaceRelease(genericRGBColorspace);
    // TODO: This may not work
    // NOTE(review): the fetched framebuffer is never explicitly unlocked in
    // this method — confirm the cache's lock/unlock expectations for
    // texture-only framebuffers to rule out a leak.
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:layerPixelSize textureOptions:self.outputTextureOptions onlyTexture:YES];
    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
    // no need to use self.outputTextureOptions here, we always need these texture options
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)layerPixelSize.width, (int)layerPixelSize.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, imageData);
    free(imageData);
    // Propagate the new frame to every target except the one flagged to be
    // skipped for updates.
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:layerPixelSize atIndex:textureIndexOfTarget];
            [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndexOfTarget];
        }
    }
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageUIElement.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 880 |
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageJFAVoronoiFilter : GPUImageFilter
{
GLuint secondFilterOutputTexture;
GLuint secondFilterFramebuffer;
GLint sampleStepUniform;
GLint sizeUniform;
NSUInteger numPasses;
}
@property (nonatomic, readwrite) CGSize sizeInPixels;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 69 |
```objective-c
#import "GPUImageFilter.h"
/** Creates a stretch distortion of the image
*/
@interface GPUImageStretchDistortionFilter : GPUImageFilter {
GLint centerUniform;
}
/** The center about which to apply the distortion, with a default of (0.5, 0.5)
*/
@property(readwrite, nonatomic) CGPoint center;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 72 |
```objective-c
#import "GPUImageOutput.h"
// The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter.
// The bytes are uploaded and stored within a texture, so nothing is kept locally.
// The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat:
// The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType:
typedef enum {
GPUPixelFormatBGRA = GL_BGRA,
GPUPixelFormatRGBA = GL_RGBA,
GPUPixelFormatRGB = GL_RGB,
GPUPixelFormatLuminance = GL_LUMINANCE
} GPUPixelFormat;
typedef enum {
GPUPixelTypeUByte = GL_UNSIGNED_BYTE,
GPUPixelTypeFloat = GL_FLOAT
} GPUPixelType;
@interface GPUImageRawDataInput : GPUImageOutput
{
CGSize uploadedImageSize;
dispatch_semaphore_t dataUpdateSemaphore;
}
// Initialization and teardown
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;
/** Input data pixel format
*/
@property (readwrite, nonatomic) GPUPixelFormat pixelFormat;
@property (readwrite, nonatomic) GPUPixelType pixelType;
// Image rendering
- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
- (void)processData;
- (void)processDataForTimestamp:(CMTime)frameTime;
- (CGSize)outputImageSize;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRawDataInput.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 394 |
```objective-c
#import "GPUImageDifferenceBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: outputs the per-channel absolute difference of the two
// inputs' RGB values, keeping the first input's alpha.
NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
}
);
#else
// Desktop OpenGL variant: identical math, no precision qualifiers.
NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
}
);
#endif
@implementation GPUImageDifferenceBlendFilter

// Configures the two-input blend with the difference fragment shader declared above.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageDifferenceBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 306 |
```objective-c
#import "GPUImageColorMatrixFilter.h"
@interface GPUImageHSBFilter : GPUImageColorMatrixFilter
/** Reset the filter to have no transformations.
*/
- (void)reset;
/** Add a hue rotation to the filter.
The hue rotation is in the range [-360, 360] with 0 being no-change.
Note that this adjustment is additive, so use the reset method if you need to.
*/
- (void)rotateHue:(float)h;
/** Add a saturation adjustment to the filter.
The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
Note that this adjustment is additive, so use the reset method if you need to.
*/
- (void)adjustSaturation:(float)s;
/** Add a brightness adjustment to the filter.
The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
Note that this adjustment is additive, so use the reset method if you need to.
*/
- (void)adjustBrightness:(float)b;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHSBFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 227 |
```objective-c
#import "GPUImageRGBErosionFilter.h"
#import "GPUImageDilationFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variants (explicit precision qualifiers required).
// Each shader takes the per-channel minimum over 2*radius+1 samples along one
// axis; the companion vertex shaders (from GPUImageDilationFilter) supply the
// stepped texture coordinates. Run twice (horizontal then vertical) by the
// two-pass superclass for a full 2-D erosion.

// Radius 1: minimum of center and the +/-1-step samples.
NSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
gl_FragColor = min(minValue, oneStepNegativeIntensity);
}
);

// Radius 2: minimum over center and +/-1, +/-2 step samples.
NSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
minValue = min(minValue, oneStepNegativeIntensity);
minValue = min(minValue, twoStepsPositiveIntensity);
gl_FragColor = min(minValue, twoStepsNegativeIntensity);
}
);

// Radius 3: minimum over center and +/-1..3 step samples.
NSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
minValue = min(minValue, oneStepNegativeIntensity);
minValue = min(minValue, twoStepsPositiveIntensity);
minValue = min(minValue, twoStepsNegativeIntensity);
minValue = min(minValue, threeStepsPositiveIntensity);
gl_FragColor = min(minValue, threeStepsNegativeIntensity);
}
);

// Radius 4: minimum over center and +/-1..4 step samples.
NSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
varying vec2 fourStepsPositiveTextureCoordinate;
varying vec2 fourStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
minValue = min(minValue, oneStepNegativeIntensity);
minValue = min(minValue, twoStepsPositiveIntensity);
minValue = min(minValue, twoStepsNegativeIntensity);
minValue = min(minValue, threeStepsPositiveIntensity);
minValue = min(minValue, threeStepsNegativeIntensity);
minValue = min(minValue, fourStepsPositiveIntensity);
gl_FragColor = min(minValue, fourStepsNegativeIntensity);
}
);
#else
// Desktop OpenGL variants: identical math, no precision qualifiers.
NSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
gl_FragColor = min(minValue, oneStepNegativeIntensity);
}
);

NSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
minValue = min(minValue, oneStepNegativeIntensity);
minValue = min(minValue, twoStepsPositiveIntensity);
gl_FragColor = min(minValue, twoStepsNegativeIntensity);
}
);

NSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
minValue = min(minValue, oneStepNegativeIntensity);
minValue = min(minValue, twoStepsPositiveIntensity);
minValue = min(minValue, twoStepsNegativeIntensity);
minValue = min(minValue, threeStepsPositiveIntensity);
gl_FragColor = min(minValue, threeStepsNegativeIntensity);
}
);

NSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
varying vec2 fourStepsPositiveTextureCoordinate;
varying vec2 fourStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
minValue = min(minValue, oneStepNegativeIntensity);
minValue = min(minValue, twoStepsPositiveIntensity);
minValue = min(minValue, twoStepsNegativeIntensity);
minValue = min(minValue, threeStepsPositiveIntensity);
minValue = min(minValue, threeStepsNegativeIntensity);
minValue = min(minValue, fourStepsPositiveIntensity);
gl_FragColor = min(minValue, fourStepsNegativeIntensity);
}
);
#endif
@implementation GPUImageRGBErosionFilter
#pragma mark -
#pragma mark Initialization and teardown
// Selects the vertex/fragment shader pair matching the requested sampling
// radius. Radii 0 and 1 share the single-step shaders; radii above 4 clamp to
// the four-step shaders. The same pair drives both passes of the two-pass
// (horizontal + vertical) erosion.
- (id)initWithRadius:(NSUInteger)erosionRadius;
{
    NSString *vertexShaderForThisRadius = nil;
    NSString *fragmentShaderForThisRadius = nil;

    if (erosionRadius <= 1)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusOneFragmentShaderString;
    }
    else if (erosionRadius == 2)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusTwoFragmentShaderString;
    }
    else if (erosionRadius == 3)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusThreeFragmentShaderString;
    }
    else
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusFourFragmentShaderString;
    }

    self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius];
    if (self == nil)
    {
        return nil;
    }

    return self;
}
// Default initializer: smallest (single-step) erosion radius.
- (id)init;
{
    self = [self initWithRadius:1];
    if (self == nil)
    {
        return nil;
    }

    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,757 |
```objective-c
#import "GPUImageSketchFilter.h"
@implementation GPUImageSketchFilter
// Invert the colorspace for a sketch
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: Sobel edge magnitude on the red channel, then inverted
// (1 - magnitude) so edges appear dark on a light background, pencil-sketch
// style. edgeStrength scales the edge response.
NSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING
(
precision mediump float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform float edgeStrength;
uniform sampler2D inputImageTexture;
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = 1.0 - (length(vec2(h, v)) * edgeStrength);
gl_FragColor = vec4(vec3(mag), 1.0);
}
);
#else
// Desktop OpenGL variant: identical math, no precision qualifier.
NSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform float edgeStrength;
uniform sampler2D inputImageTexture;
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = 1.0 - (length(vec2(h, v)) * edgeStrength);
gl_FragColor = vec4(vec3(mag), 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
// Default initializer: routes through the shader-based initializer with the
// inverted-Sobel sketch fragment shader.
- (id)init;
{
    self = [self initWithFragmentShaderFromString:kGPUImageSketchFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSketchFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 833 |
```objective-c
// Created by Jorge Garcia on 9/5/12.
//

#import "GPUImageTwoInputFilter.h"

// Two-input "normal" blend mode (Photoshop-style compositing of the second
// input over the first); the blend shader itself is defined in the .m file.
@interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 40 |
```objective-c
#import "GPUImageFilter.h"
/** Performs a vignetting effect, fading out the image at the edges
*/
@interface GPUImageVignetteFilter : GPUImageFilter
{
GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform;
}
// the center for the vignette in tex coords (defaults to 0.5, 0.5)
@property (nonatomic, readwrite) CGPoint vignetteCenter;
// The color to use for the Vignette (defaults to black)
@property (nonatomic, readwrite) GPUVector3 vignetteColor;
// The normalized distance from the center where the vignette effect starts. Default of 0.5.
@property (nonatomic, readwrite) CGFloat vignetteStart;
// The normalized distance from the center where the vignette effect ends. Default of 0.75.
@property (nonatomic, readwrite) CGFloat vignetteEnd;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 192 |
```objective-c
#import "GPUImageFilter.h"
/** Kuwahara image abstraction, drawn from the work of Kyprianidis, et. al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
*/
@interface GPUImageKuwaharaFilter : GPUImageFilter
{
GLint radiusUniform;
}
/// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter.
@property(readwrite, nonatomic) NSUInteger radius;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 156 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/// Blends two images by taking the maximum value of each color component between the images
@interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 46 |
```objective-c
#import "GPUImageSharpenFilter.h"
// Vertex shader: precomputes the four cross-neighbor texture coordinates
// (one texel left/right/up/down, via imageWidthFactor/imageHeightFactor) plus
// the kernel weights for a 5-tap sharpen: center weight 1 + 4*sharpness,
// neighbor weight sharpness.
NSString *const kGPUImageSharpenVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
uniform float imageWidthFactor;
uniform float imageHeightFactor;
uniform float sharpness;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying float centerMultiplier;
varying float edgeMultiplier;
void main()
{
gl_Position = position;
vec2 widthStep = vec2(imageWidthFactor, 0.0);
vec2 heightStep = vec2(0.0, imageHeightFactor);
textureCoordinate = inputTextureCoordinate.xy;
leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
topTextureCoordinate = inputTextureCoordinate.xy + heightStep;
bottomTextureCoordinate = inputTextureCoordinate.xy - heightStep;
centerMultiplier = 1.0 + 4.0 * sharpness;
edgeMultiplier = sharpness;
}
);
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: output = center*centerMultiplier - sum(neighbors)*edgeMultiplier,
// i.e. a Laplacian-based sharpen with weights computed in the vertex shader.
// NOTE(review): output alpha is taken from the bottom-neighbor sample rather
// than the center texel; both platform variants agree, so it is preserved
// as-is — confirm upstream before changing.
NSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING
(
precision highp float;
varying highp vec2 textureCoordinate;
varying highp vec2 leftTextureCoordinate;
varying highp vec2 rightTextureCoordinate;
varying highp vec2 topTextureCoordinate;
varying highp vec2 bottomTextureCoordinate;
varying highp float centerMultiplier;
varying highp float edgeMultiplier;
uniform sampler2D inputImageTexture;
void main()
{
mediump vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb;
mediump vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
mediump vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
mediump vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
mediump vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w);
}
);
#else
// Desktop OpenGL variant: identical math, no precision qualifiers.
NSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying float centerMultiplier;
varying float edgeMultiplier;
uniform sampler2D inputImageTexture;
void main()
{
vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb;
vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w);
}
);
#endif
@implementation GPUImageSharpenFilter
@synthesize sharpness = _sharpness;
#pragma mark -
#pragma mark Initialization and teardown
// Builds the sharpen program, caches its uniform locations, and starts with
// no sharpening applied (sharpness 0.0 is a pass-through).
- (id)init;
{
    self = [super initWithVertexShaderFromString:kGPUImageSharpenVertexShaderString fragmentShaderFromString:kGPUImageSharpenFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    sharpnessUniform = [filterProgram uniformIndex:@"sharpness"];
    imageWidthFactorUniform = [filterProgram uniformIndex:@"imageWidthFactor"];
    imageHeightFactorUniform = [filterProgram uniformIndex:@"imageHeightFactor"];

    self.sharpness = 0.0;

    return self;
}
// Uploads per-texel step sizes for the vertex shader. When the input rotation
// swaps width and height, the texel steps must be expressed in the rotated
// frame, so the dimensions are exchanged before inverting.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:filterProgram];

        BOOL dimensionsAreSwapped = GPUImageRotationSwapsWidthAndHeight(inputRotation);
        CGFloat texelsAcross = dimensionsAreSwapped ? filterFrameSize.height : filterFrameSize.width;
        CGFloat texelsDown = dimensionsAreSwapped ? filterFrameSize.width : filterFrameSize.height;

        glUniform1f(imageWidthFactorUniform, 1.0 / texelsAcross);
        glUniform1f(imageHeightFactorUniform, 1.0 / texelsDown);
    });
}
#pragma mark -
#pragma mark Accessors
// Stores the new sharpness and pushes it to the shader uniform immediately.
- (void)setSharpness:(CGFloat)newValue;
{
    _sharpness = newValue;
    [self setFloat:newValue forUniform:sharpnessUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,081 |
```objective-c
#import "GPUImageSobelEdgeDetectionFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImage3x3ConvolutionFilter.h"
// Code from "Graphics Shaders: Theory and Practice" by M. Bailey and S. Cunningham
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING
(
precision mediump float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform float edgeStrength;
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = length(vec2(h, v)) * edgeStrength;
gl_FragColor = vec4(vec3(mag), 1.0);
}
);
#else
NSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform float edgeStrength;
void main()
{
float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = length(vec2(h, v)) * edgeStrength;
gl_FragColor = vec4(vec3(mag), 1.0);
}
);
#endif
@implementation GPUImageSobelEdgeDetectionFilter
@synthesize texelWidth = _texelWidth;
@synthesize texelHeight = _texelHeight;
@synthesize edgeStrength = _edgeStrength;
#pragma mark -
#pragma mark Initialization and teardown
// Default initializer: uses the stock Sobel edge-detection fragment shader.
- (id)init;
{
    self = [self initWithFragmentShaderFromString:kGPUImageSobelEdgeDetectionFragmentShaderString];
    return self;
}
// Designated initializer. The first stage is a luminance pass so the Sobel
// (second) stage only has to sample single-channel intensity values.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
// Do a luminance pass first to reduce the calculations performed at each fragment in the edge detection phase
if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString secondStageVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString secondStageFragmentShaderFromString:fragmentShaderString]))
{
return nil;
}
hasOverriddenImageSizeFactor = NO;
// All uniform handles live on the second-stage (edge detection) program.
texelWidthUniform = [secondFilterProgram uniformIndex:@"texelWidth"];
texelHeightUniform = [secondFilterProgram uniformIndex:@"texelHeight"];
edgeStrengthUniform = [secondFilterProgram uniformIndex:@"edgeStrength"];
self.edgeStrength = 1.0;
return self;
}
// Derives default texel spacing (1/width, 1/height) from the incoming frame
// size, unless the caller has explicitly set texelWidth/texelHeight.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
if (!hasOverriddenImageSizeFactor)
{
_texelWidth = 1.0 / filterFrameSize.width;
_texelHeight = 1.0 / filterFrameSize.height;
runSynchronouslyOnVideoProcessingQueue(^{
// Save and restore the active program so this uniform update does not
// disturb whatever shader is currently bound on the processing queue.
GLProgram *previousProgram = [GPUImageContext sharedImageProcessingContext].currentShaderProgram;
[GPUImageContext setActiveShaderProgram:secondFilterProgram];
glUniform1f(texelWidthUniform, _texelWidth);
glUniform1f(texelHeightUniform, _texelHeight);
[GPUImageContext setActiveShaderProgram:previousProgram];
});
}
}
// Re-applies the texel-spacing uniforms right before the second stage
// (program index 1, the Sobel pass) renders.
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
[super setUniformsForProgramAtIndex:programIndex];
if (programIndex == 1)
{
glUniform1f(texelWidthUniform, _texelWidth);
glUniform1f(texelHeightUniform, _texelHeight);
}
}
// Monochrome input is currently disabled (see the commented-out YES).
- (BOOL)wantsMonochromeInput;
{
// return YES;
return NO;
}
// Monochrome output is currently disabled (see the commented-out YES).
- (BOOL)providesMonochromeOutput;
{
// return YES;
return NO;
}
#pragma mark -
#pragma mark Accessors
// Manually overrides the horizontal texel spacing and disables the
// automatic size-based calculation performed in -setupFilterForSize:.
- (void)setTexelWidth:(CGFloat)newValue;
{
    hasOverriddenImageSizeFactor = YES;
    _texelWidth = newValue;
    [self setFloat:newValue forUniform:texelWidthUniform program:secondFilterProgram];
}
// Manually overrides the vertical texel spacing and disables the automatic
// size-based calculation performed in -setupFilterForSize:.
- (void)setTexelHeight:(CGFloat)newValue;
{
    hasOverriddenImageSizeFactor = YES;
    _texelHeight = newValue;
    [self setFloat:newValue forUniform:texelHeightUniform program:secondFilterProgram];
}
// Stores the edge-magnitude multiplier and forwards it to the second-stage
// program's "edgeStrength" uniform.
- (void)setEdgeStrength:(CGFloat)newValue;
{
    _edgeStrength = newValue;
    [self setFloat:newValue forUniform:edgeStrengthUniform program:secondFilterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,495 |
```objective-c
#import "GPUImageGlassSphereFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp vec2 center;
uniform highp float radius;
uniform highp float aspectRatio;
uniform highp float refractiveIndex;
// uniform vec3 lightPosition;
const highp vec3 lightPosition = vec3(-0.5, 0.5, 1.0);
const highp vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);
void main()
{
highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp float distanceFromCenter = distance(center, textureCoordinateToUse);
lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
distanceFromCenter = distanceFromCenter / radius;
highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
highp vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
refractedVector.xy = -refractedVector.xy;
highp vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;
// Grazing angle lighting
highp float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));
finalSphereColor += lightingIntensity;
// Specular lighting
lightingIntensity = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);
lightingIntensity = pow(lightingIntensity, 15.0);
finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;
gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;
}
);
#else
NSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec2 center;
uniform float radius;
uniform float aspectRatio;
uniform float refractiveIndex;
// uniform vec3 lightPosition;
const vec3 lightPosition = vec3(-0.5, 0.5, 1.0);
const vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);
void main()
{
vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
float distanceFromCenter = distance(center, textureCoordinateToUse);
float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
distanceFromCenter = distanceFromCenter / radius;
float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
refractedVector.xy = -refractedVector.xy;
vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;
// Grazing angle lighting
float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));
finalSphereColor += lightingIntensity;
// Specular lighting
lightingIntensity = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);
lightingIntensity = pow(lightingIntensity, 15.0);
finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;
gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;
}
);
#endif
@implementation GPUImageGlassSphereFilter
#pragma mark -
#pragma mark Initialization and teardown
// Initializes the filter with the glass-sphere refraction fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageGlassSphereFragmentShaderString];
    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,062 |
```objective-c
#import "GPUImageHazeFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float hazeDistance;
uniform highp float slope;
void main()
{
//todo reconsider precision modifiers
highp vec4 color = vec4(1.0);//todo reimplement as a parameter
highp float d = textureCoordinate.y * slope + hazeDistance;
highp vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply
c = (c - d * color) / (1.0 -d);
gl_FragColor = c; //consider using premultiply(c);
}
);
#else
NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float hazeDistance;
uniform float slope;
void main()
{
//todo reconsider precision modifiers
vec4 color = vec4(1.0);//todo reimplement as a parameter
float d = textureCoordinate.y * slope + hazeDistance;
vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply
c = (c - d * color) / (1.0 -d);
gl_FragColor = c; //consider using premultiply(c);
}
);
#endif
@implementation GPUImageHazeFilter
@synthesize distance = _distance;
@synthesize slope = _slope;
#pragma mark -
#pragma mark Initialization and teardown
// Sets up the haze shader and defaults: distance 0.2, slope 0.0.
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageHazeFragmentShaderString]))
{
return nil;
}
// Note: the "distance" property maps onto the shader's "hazeDistance" uniform.
distanceUniform = [filterProgram uniformIndex:@"hazeDistance"];
slopeUniform = [filterProgram uniformIndex:@"slope"];
self.distance = 0.2;
self.slope = 0.0;
return self;
}
#pragma mark -
#pragma mark Accessors
// Constant term of the per-row haze amount (the shader computes
// d = y * slope + hazeDistance); pushed to the "hazeDistance" uniform.
- (void)setDistance:(CGFloat)newValue;
{
    _distance = newValue;
    [self setFloat:newValue forUniform:distanceUniform program:filterProgram];
}
// Vertical gradient of the haze amount (multiplies the texture y coordinate
// in the shader); pushed to the "slope" uniform.
- (void)setSlope:(CGFloat)newValue;
{
    _slope = newValue;
    [self setFloat:newValue forUniform:slopeUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHazeFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 538 |
```objective-c
#import "GPUImageAverageColor.h"
NSString *const kGPUImageColorAveragingVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
uniform float texelWidth;
uniform float texelHeight;
varying vec2 upperLeftInputTextureCoordinate;
varying vec2 upperRightInputTextureCoordinate;
varying vec2 lowerLeftInputTextureCoordinate;
varying vec2 lowerRightInputTextureCoordinate;
void main()
{
gl_Position = position;
upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight);
upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight);
lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight);
lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight);
}
);
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING
(
precision highp float;
uniform sampler2D inputImageTexture;
varying highp vec2 outputTextureCoordinate;
varying highp vec2 upperLeftInputTextureCoordinate;
varying highp vec2 upperRightInputTextureCoordinate;
varying highp vec2 lowerLeftInputTextureCoordinate;
varying highp vec2 lowerRightInputTextureCoordinate;
void main()
{
highp vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate);
highp vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate);
highp vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate);
highp vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate);
gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor);
}
);
#else
NSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying vec2 outputTextureCoordinate;
varying vec2 upperLeftInputTextureCoordinate;
varying vec2 upperRightInputTextureCoordinate;
varying vec2 lowerLeftInputTextureCoordinate;
varying vec2 lowerRightInputTextureCoordinate;
void main()
{
vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate);
vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate);
vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate);
vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate);
gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor);
}
);
#endif
@implementation GPUImageAverageColor
@synthesize colorAverageProcessingFinishedBlock = _colorAverageProcessingFinishedBlock;
#pragma mark -
#pragma mark Initialization and teardown
// Sets up the 4:1 box-averaging shader pair and registers a per-frame
// completion block that finishes the average on the CPU.
- (id)init;
{
if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageColorAveragingFragmentShaderString]))
{
return nil;
}
texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
finalStageSize = CGSizeMake(1.0, 1.0);
// NOTE(review): __unsafe_unretained breaks the retain cycle with the block,
// but unlike __weak it dangles if self is deallocated while a frame is in
// flight — confirm whether __weak is viable for the deployment target.
__unsafe_unretained GPUImageAverageColor *weakSelf = self;
[self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
[weakSelf extractAverageColorAtFrameTime:frameTime];
}];
return self;
}
// Releases the CPU-side readback buffer allocated lazily in
// -extractAverageColorAtFrameTime:.
- (void)dealloc;
{
if (rawImagePixels != NULL)
{
free(rawImagePixels);
}
}
#pragma mark -
#pragma mark Managing the display FBOs
// Runs successive reduction passes, each shrinking the image by 4x per side
// and averaging four samples per output pixel, until the result is small
// enough for the CPU to finish (see -extractAverageColorAtFrameTime:).
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
if (self.preventRendering)
{
[firstInputFramebuffer unlock];
return;
}
// Reset so the first loop iteration's unlock below is a harmless nil message.
outputFramebuffer = nil;
[GPUImageContext setActiveShaderProgram:filterProgram];
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
GLuint currentTexture = [firstInputFramebuffer texture];
// Number of 4x reductions possible before either side drops below ~1 px.
NSUInteger numberOfReductionsInX = floor(log(inputTextureSize.width) / log(4.0));
NSUInteger numberOfReductionsInY = floor(log(inputTextureSize.height) / log(4.0));
NSUInteger reductionsToHitSideLimit = MIN(numberOfReductionsInX, numberOfReductionsInY);
for (NSUInteger currentReduction = 0; currentReduction < reductionsToHitSideLimit; currentReduction++)
{
CGSize currentStageSize = CGSizeMake(floor(inputTextureSize.width / pow(4.0, currentReduction + 1.0)), floor(inputTextureSize.height / pow(4.0, currentReduction + 1.0)));
if ( (currentStageSize.height < 2.0) || (currentStageSize.width < 2.0) )
{
// A really small last stage seems to cause significant errors in the average, so I abort and leave the rest to the CPU at this point
break;
// currentStageSize.height = 2.0; // TODO: Rotate the image to account for this case, which causes FBO construction to fail
}
// Release the previous stage's framebuffer (nil no-op on first pass).
[outputFramebuffer unlock];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:currentStageSize textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, currentTexture);
glUniform1i(filterInputTextureUniform, 2);
// Half-texel spacing positions the four vertex-shader sample offsets.
glUniform1f(texelWidthUniform, 0.5 / currentStageSize.width);
glUniform1f(texelHeightUniform, 0.5 / currentStageSize.height);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
// The output of this pass becomes the input of the next one.
currentTexture = [outputFramebuffer texture];
finalStageSize = currentStageSize;
}
[firstInputFramebuffer unlock];
}
// Rotation is ignored for averaging — the mean color does not depend on
// the image's orientation.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
inputRotation = kGPUImageNoRotation;
}
// Reads back the final GPU reduction stage and finishes the average on the
// CPU, then reports the normalized RGBA mean through the completion block.
- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;
{
runSynchronouslyOnVideoProcessingQueue(^{
// we need a normal color texture for averaging the color values
NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture internal format for this filter must be GL_RGBA.");
NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height);
// Lazily allocated once; freed in -dealloc. Assumes the final stage size
// does not grow between frames, since the buffer is never reallocated.
if (rawImagePixels == NULL)
{
rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4);
}
[GPUImageContext useImageProcessingContext];
[outputFramebuffer activateFramebuffer];
glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
// Integer accumulation over the remaining pixels of each RGBA channel.
NSUInteger redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0;
NSUInteger byteIndex = 0;
for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
{
redTotal += rawImagePixels[byteIndex++];
greenTotal += rawImagePixels[byteIndex++];
blueTotal += rawImagePixels[byteIndex++];
alphaTotal += rawImagePixels[byteIndex++];
}
// Normalize to [0, 1] per channel.
CGFloat normalizedRedTotal = (CGFloat)redTotal / (CGFloat)totalNumberOfPixels / 255.0;
CGFloat normalizedGreenTotal = (CGFloat)greenTotal / (CGFloat)totalNumberOfPixels / 255.0;
CGFloat normalizedBlueTotal = (CGFloat)blueTotal / (CGFloat)totalNumberOfPixels / 255.0;
CGFloat normalizedAlphaTotal = (CGFloat)alphaTotal / (CGFloat)totalNumberOfPixels / 255.0;
if (_colorAverageProcessingFinishedBlock != NULL)
{
_colorAverageProcessingFinishedBlock(normalizedRedTotal, normalizedGreenTotal, normalizedBlueTotal, normalizedAlphaTotal, frameTime);
}
});
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAverageColor.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,864 |
```objective-c
#import "GPUImageThreeInputFilter.h"
NSString *const kGPUImageThreeInputTextureVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
attribute vec4 inputTextureCoordinate2;
attribute vec4 inputTextureCoordinate3;
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
varying vec2 textureCoordinate3;
void main()
{
gl_Position = position;
textureCoordinate = inputTextureCoordinate.xy;
textureCoordinate2 = inputTextureCoordinate2.xy;
textureCoordinate3 = inputTextureCoordinate3.xy;
}
);
@implementation GPUImageThreeInputFilter
#pragma mark -
#pragma mark Initialization and teardown
// Convenience initializer: pairs the supplied fragment shader with the
// standard three-input vertex shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    self = [self initWithVertexShaderFromString:kGPUImageThreeInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString];
    return self;
}
// Designated initializer. Extends the two-input superclass with state and
// GL attribute/uniform handles for a third texture input.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
{
return nil;
}
inputRotation3 = kGPUImageNoRotation;
hasSetSecondTexture = NO;
hasReceivedThirdFrame = NO;
thirdFrameWasVideo = NO;
thirdFrameCheckDisabled = NO;
thirdFrameTime = kCMTimeInvalid;
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
filterThirdTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate3"];
filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"]; // This does assume a name of "inputImageTexture3" for the third input texture in the fragment shader
glEnableVertexAttribArray(filterThirdTextureCoordinateAttribute);
});
return self;
}
// Registers the third texture-coordinate attribute with the program before
// it links, in addition to the ones the superclass registers.
- (void)initializeAttributes;
{
[super initializeAttributes];
[filterProgram addAttribute:@"inputTextureCoordinate3"];
}
// Lets rendering proceed without waiting for a new frame on input 3
// (see -newFrameReadyAtTime:atIndex:).
- (void)disableThirdFrameCheck;
{
thirdFrameCheckDisabled = YES;
}
#pragma mark -
#pragma mark Rendering
// Binds all three input textures to texture units 2-4 and draws a single
// quad through the filter program into a fresh output framebuffer.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
if (self.preventRendering)
{
// Still release the input locks so the framebuffer cache is not starved.
[firstInputFramebuffer unlock];
[secondInputFramebuffer unlock];
[thirdInputFramebuffer unlock];
return;
}
[GPUImageContext setActiveShaderProgram:filterProgram];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
if (usingNextFrameForImageCapture)
{
// Extra lock keeps the framebuffer alive until the capture reads it.
[outputFramebuffer lock];
}
[self setUniformsForProgramAtIndex:0];
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
glClear(GL_COLOR_BUFFER_BIT);
// Inputs 1-3 go on texture units 2-4 respectively.
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform, 2);
glActiveTexture(GL_TEXTURE3);
glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform2, 3);
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, [thirdInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform3, 4);
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
// Inputs 2 and 3 use their own rotation-adjusted texture coordinates.
glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[firstInputFramebuffer unlock];
[secondInputFramebuffer unlock];
[thirdInputFramebuffer unlock];
if (usingNextFrameForImageCapture)
{
dispatch_semaphore_signal(imageCaptureSemaphore);
}
}
#pragma mark -
#pragma mark GPUImageInput
// Reports which input slot a newly attached source should use: slot 2 once
// the second texture is set, slot 1 after the first, otherwise slot 0.
- (NSInteger)nextAvailableTextureIndex;
{
    if (hasSetSecondTexture)
    {
        return 2;
    }
    return hasSetFirstTexture ? 1 : 0;
}
// Stores and locks the framebuffer for the given input slot. Note: unlike
// slots 0 and 1, the third slot has no "has set" flag to record.
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex)
    {
        case 0:
            firstInputFramebuffer = newInputFramebuffer;
            hasSetFirstTexture = YES;
            [firstInputFramebuffer lock];
            break;
        case 1:
            secondInputFramebuffer = newInputFramebuffer;
            hasSetSecondTexture = YES;
            [secondInputFramebuffer lock];
            break;
        default:
            thirdInputFramebuffer = newInputFramebuffer;
            [thirdInputFramebuffer lock];
            break;
    }
}
// A zero size signals that an input has disconnected, which clears the
// corresponding "has set texture" flag. The third input (index >= 2) has
// no such flag and is ignored here.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
if (textureIndex == 0)
{
[super setInputSize:newSize atIndex:textureIndex];
if (CGSizeEqualToSize(newSize, CGSizeZero))
{
hasSetFirstTexture = NO;
}
}
else if (textureIndex == 1)
{
if (CGSizeEqualToSize(newSize, CGSizeZero))
{
hasSetSecondTexture = NO;
}
}
}
// Records the rotation mode for the addressed input; anything past the
// second index targets the third input.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex)
    {
        case 0:
            inputRotation = newInputRotation;
            break;
        case 1:
            inputRotation2 = newInputRotation;
            break;
        default:
            inputRotation3 = newInputRotation;
            break;
    }
}
// Returns the input size with width and height swapped when the addressed
// input's rotation mode requires it; otherwise returns it unchanged.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    GPUImageRotationMode rotationForIndex;
    if (textureIndex == 0)
    {
        rotationForIndex = inputRotation;
    }
    else if (textureIndex == 1)
    {
        rotationForIndex = inputRotation2;
    }
    else
    {
        rotationForIndex = inputRotation3;
    }

    if (!GPUImageRotationSwapsWidthAndHeight(rotationForIndex))
    {
        return sizeToRotate;
    }
    return CGSizeMake(sizeToRotate.height, sizeToRotate.width);
}
// Gathers frames from the three inputs and fires a render pass once every
// input has delivered one (inputs whose frame check is disabled count as
// always ready). A movie frame arriving while an opposing input carries an
// indefinite timestamp (i.e. a still image) also triggers a render so video
// playback is not stalled waiting on static inputs.
//
// Fix: the CMTIME_IS_INDEFINITE conditions are now explicitly parenthesized;
// the original `if CMTIME_IS_INDEFINITE(x)` only compiled because the
// macro's expansion happens to begin and end with parentheses.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // You can set up infinite update loops, so this helps to short circuit them
    if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame)
    {
        return;
    }

    BOOL updatedMovieFrameOppositeStillImage = NO;

    if (textureIndex == 0)
    {
        hasReceivedFirstFrame = YES;
        firstFrameTime = frameTime;
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }

        // NOTE(review): only the second input's timestamp is consulted here,
        // never the third — confirm this asymmetry is intentional upstream.
        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(secondFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else if (textureIndex == 1)
    {
        hasReceivedSecondFrame = YES;
        secondFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(firstFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else
    {
        hasReceivedThirdFrame = YES;
        thirdFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(firstFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }

    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
    if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame) || updatedMovieFrameOppositeStillImage)
    {
        static const GLfloat imageVertices[] = {
            -1.0f, -1.0f,
             1.0f, -1.0f,
            -1.0f,  1.0f,
             1.0f,  1.0f,
        };

        [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];

        [self informTargetsAboutNewFrameAtTime:frameTime];

        // Reset for the next round of incoming frames.
        hasReceivedFirstFrame = NO;
        hasReceivedSecondFrame = NO;
        hasReceivedThirdFrame = NO;
    }
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,036 |
```objective-c
#import "GPUImageOutput.h"
// Captures a UIView or CALayer as a texture source for a GPUImage pipeline.
@interface GPUImageUIElement : GPUImageOutput
// Initialization and teardown
- (id)initWithView:(UIView *)inputView;
- (id)initWithLayer:(CALayer *)inputLayer;
// Layer management
// Size of the backing layer, in pixels.
- (CGSize)layerSizeInPixels;
// Re-captures the element's contents; the variants differ only in the
// frame timestamp propagated to downstream targets.
- (void)update;
- (void)updateUsingCurrentTime;
- (void)updateWithTimestamp:(CMTime)frameTime;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageUIElement.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 89 |
```objective-c
#import "GPUImageEmbossFilter.h"
@implementation GPUImageEmbossFilter
@synthesize intensity = _intensity;
// Sets up the filter with the default emboss intensity of 1.0.
- (id)init;
{
    self = [super init];
    if (self)
    {
        self.intensity = 1.0;
    }
    return self;
}
#pragma mark -
#pragma mark Accessors
// Rebuilds the 3x3 emboss convolution kernel scaled by the given intensity
// and hands it to the underlying convolution filter. At intensity 1.0 this
// is the classic emboss kernel:
//   -2 -1  0
//   -1  1  1
//    0  1  2
- (void)setIntensity:(CGFloat)newValue;
{
    _intensity = newValue;

    GPUMatrix3x3 kernel;
    kernel.one.one = newValue * (-2.0);
    kernel.one.two = -newValue;
    kernel.one.three = 0.0f;
    kernel.two.one = -newValue;
    kernel.two.two = 1.0;
    kernel.two.three = newValue;
    kernel.three.one = 0.0f;
    kernel.three.two = newValue;
    kernel.three.three = newValue * 2.0;

    self.convolutionKernel = kernel;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 345 |
```objective-c
//
// GPUImageKuwaharaRadius3Filter.h
#import "GPUImageFilter.h"
// Kuwahara smoothing filter specialized to a fixed sampling radius of 3.
@interface GPUImageKuwaharaRadius3Filter : GPUImageFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 39 |
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
// Resamples the image using Lanczos kernels, split into two separable
// texture-sampling passes via the superclass.
@interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter
// Size of the source image before resampling.
@property(readwrite, nonatomic) CGSize originalImageSize;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 43 |
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageFalseColorFilter : GPUImageFilter
{
GLint firstColorUniform, secondColorUniform;
}
// The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) and (1.0, 0.0, 0.0).
@property(readwrite, nonatomic) GPUVector4 firstColor;
@property(readwrite, nonatomic) GPUVector4 secondColor;
// Convenience setters for the RGB components of each replacement color.
- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 162 |
```objective-c
#import "GPUImageFilter.h"
/** Transforms the colors of an image by applying a 4x4 matrix to them
 */
@interface GPUImageColorMatrixFilter : GPUImageFilter
{
GLint colorMatrixUniform;
GLint intensityUniform;
}
/** A 4x4 matrix used to transform each color in an image
 */
@property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix;
/** The degree to which the new transformed color replaces the original color for each pixel
 */
@property(readwrite, nonatomic) CGFloat intensity;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 109 |
```objective-c
#import "GPUImageFilter.h"
// Applies a directional motion blur, configurable in size and angle.
@interface GPUImageMotionBlurFilter : GPUImageFilter
/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;
/** The angular direction of the blur, in degrees. 0 degrees by default
 */
@property (readwrite, nonatomic) CGFloat blurAngle;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 88 |
```objective-c
#import "GPUImageAverageColor.h"
// Computes the average luminosity of the image, building on the color
// averaging reduction performed by the superclass.
@interface GPUImageLuminosity : GPUImageAverageColor
{
GLProgram *secondFilterProgram;
GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform;
}
// This block is called on the completion of color averaging for a frame
@property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime);
- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
- (void)initializeSecondaryAttributes;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminosity.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 140 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/** Two-input blend filter applying a divide blend of its two inputs
 */
@interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 26 |
```objective-c
#import "GPUImageGammaFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: raises each RGB channel to the power of the
// "gamma" uniform, passing alpha (textureColor.w) through unchanged.
NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform lowp float gamma;
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
 }
);
#else
// Desktop OpenGL variant: identical math, without ES precision qualifiers.
NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform float gamma;
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
 }
);
#endif
@implementation GPUImageGammaFilter

@synthesize gamma = _gamma;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter from the gamma fragment shader and defaults gamma to 1.0
/// (which leaves colors unchanged).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageGammaFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    gammaUniform = [filterProgram uniformIndex:@"gamma"];
    self.gamma = 1.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new gamma value and pushes it to the shader uniform immediately.
- (void)setGamma:(CGFloat)gamma;
{
    _gamma = gamma;
    [self setFloat:gamma forUniform:gammaUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGammaFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 321 |
```objective-c
#import "GPUImageFilter.h"
/** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
 */
@interface GPUImagePosterizeFilter : GPUImageFilter
{
    GLint colorLevelsUniform;   // shader uniform location for the quantization level count
}
/** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.
 */
@property(readwrite, nonatomic) NSUInteger colorLevels;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 96 |
```objective-c
#import "GPUImagePinchDistortionFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader for the pinch distortion. The distance to the
// center is measured in aspect-corrected coordinates, while the actual warp
// (scaling toward/away from the center) is applied in the original texture
// space. Pixels outside "radius" are sampled unmodified.
NSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform highp float aspectRatio;
 uniform highp vec2 center;
 uniform highp float radius;
 uniform highp float scale;
 void main()
 {
     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
     highp float dist = distance(center, textureCoordinateToUse);
     textureCoordinateToUse = textureCoordinate;
     if (dist < radius)
     {
         textureCoordinateToUse -= center;
         highp float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;
         textureCoordinateToUse = textureCoordinateToUse * percent;
         textureCoordinateToUse += center;
         gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
     }
     else
     {
         gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
     }
 }
);
#else
// Desktop OpenGL variant: identical math, without ES precision qualifiers.
NSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform float aspectRatio;
 uniform vec2 center;
 uniform float radius;
 uniform float scale;
 void main()
 {
     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
     float dist = distance(center, textureCoordinateToUse);
     textureCoordinateToUse = textureCoordinate;
     if (dist < radius)
     {
         textureCoordinateToUse -= center;
         float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;
         textureCoordinateToUse = textureCoordinateToUse * percent;
         textureCoordinateToUse += center;
         gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
     }
     else
     {
         gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
     }
 }
);
#endif
// Class extension: keeps the aspect-ratio bookkeeping private to the implementation.
@interface GPUImagePinchDistortionFilter ()
// Recomputes the aspectRatio uniform from the current input size and rotation.
- (void)adjustAspectRatio;
@property (readwrite, nonatomic) CGFloat aspectRatio;
@end
@implementation GPUImagePinchDistortionFilter

@synthesize aspectRatio = _aspectRatio;
@synthesize center = _center;
@synthesize radius = _radius;
@synthesize scale = _scale;

#pragma mark -
#pragma mark Initialization and teardown

/// Sets up the pinch-distortion shader and seeds the default pinch parameters.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImagePinchDistortionFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    // Cache the uniform locations once, up front.
    aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
    radiusUniform = [filterProgram uniformIndex:@"radius"];
    scaleUniform = [filterProgram uniformIndex:@"scale"];
    centerUniform = [filterProgram uniformIndex:@"center"];

    self.radius = 1.0;
    self.scale = 0.5;
    self.center = CGPointMake(0.5, 0.5);

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Recomputes the aspect-ratio uniform, inverting the ratio when the input
/// rotation swaps width and height.
- (void)adjustAspectRatio;
{
    BOOL dimensionsSwapped = GPUImageRotationSwapsWidthAndHeight(inputRotation);
    CGFloat ratio = dimensionsSwapped
        ? (inputTextureSize.width / inputTextureSize.height)
        : (inputTextureSize.height / inputTextureSize.width);
    [self setAspectRatio:ratio];
}

- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    [super forceProcessingAtSize:frameSize];
    [self adjustAspectRatio];
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    CGSize previousSize = inputTextureSize;
    [super setInputSize:newSize atIndex:textureIndex];

    // Only recompute when the effective size actually changed and the new size is non-zero.
    if (!CGSizeEqualToSize(previousSize, inputTextureSize) && !CGSizeEqualToSize(newSize, CGSizeZero))
    {
        [self adjustAspectRatio];
    }
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self setCenter:self.center];   // re-rotate the stored center for the new orientation
    [self adjustAspectRatio];
}

- (void)setAspectRatio:(CGFloat)aspectRatio;
{
    _aspectRatio = aspectRatio;
    [self setFloat:aspectRatio forUniform:aspectRatioUniform program:filterProgram];
}

- (void)setRadius:(CGFloat)radius;
{
    _radius = radius;
    [self setFloat:radius forUniform:radiusUniform program:filterProgram];
}

- (void)setScale:(CGFloat)scale;
{
    _scale = scale;
    [self setFloat:scale forUniform:scaleUniform program:filterProgram];
}

- (void)setCenter:(CGPoint)center;
{
    _center = center;
    [self setPoint:[self rotatedPoint:center forRotation:inputRotation]
        forUniform:centerUniform
           program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,090 |
```objective-c
#import "GPUImageMissEtikateFilter.h"
#import "GPUImagePicture.h"
#import "GPUImageLookupFilter.h"
@implementation GPUImageMissEtikateFilter

/// Loads the bundled "Miss Etikate" lookup-table image and wires it into a
/// single GPUImageLookupFilter, which serves as both the entry and the
/// terminal filter of this group.
- (id)init;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    UIImage *image = [UIImage imageNamed:@"lookup_miss_etikate.png"];
#else
    NSImage *image = [NSImage imageNamed:@"lookup_miss_etikate.png"];
#endif

    NSAssert(image, @"To use GPUImageMissEtikateFilter you need to add lookup_miss_etikate.png from GPUImage/framework/Resources to your application bundle.");

    GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
    [self addFilter:lookupFilter];

    // Feed the lookup table in on the second texture slot and render it once.
    lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
    [lookupImageSource addTarget:lookupFilter atTextureLocation:1];
    [lookupImageSource processImage];

    self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];
    self.terminalFilter = lookupFilter;

    return self;
}

#pragma mark -
#pragma mark Accessors

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 248 |
```objective-c
#import "GPUImageFilterGroup.h"
#import "GPUImagePicture.h"
// A GPUImageFilterGroup bundles several filters into one unit. Incoming
// GPUImageInput messages fan out to every filter in initialFilters, while
// output/target management is delegated to terminalFilter.
@implementation GPUImageFilterGroup

@synthesize terminalFilter = _terminalFilter;
@synthesize initialFilters = _initialFilters;
@synthesize inputFilterToIgnoreForUpdates = _inputFilterToIgnoreForUpdates;

- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    filters = [[NSMutableArray alloc] init];

    return self;
}

#pragma mark -
#pragma mark Filter management

// Registers a filter as a member of the group; wiring it into the chain
// (targets, initial/terminal roles) is the caller's responsibility.
- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;
{
    [filters addObject:newFilter];
}

- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;
{
    return [filters objectAtIndex:filterIndex];
}

- (NSUInteger)filterCount;
{
    return [filters count];
}

#pragma mark -
#pragma mark Still image processing

// Still-image capture is forwarded to the terminal filter, which produces the
// group's final output.
- (void)useNextFrameForImageCapture;
{
    [self.terminalFilter useNextFrameForImageCapture];
}

- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
{
    return [self.terminalFilter newCGImageFromCurrentlyProcessedOutput];
}

#pragma mark -
#pragma mark GPUImageOutput overrides

// All target management below is delegated to the terminal filter.
- (void)setTargetToIgnoreForUpdates:(id<GPUImageInput>)targetToIgnoreForUpdates;
{
    [_terminalFilter setTargetToIgnoreForUpdates:targetToIgnoreForUpdates];
}

- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
    [_terminalFilter addTarget:newTarget atTextureLocation:textureLocation];
}

- (void)removeTarget:(id<GPUImageInput>)targetToRemove;
{
    [_terminalFilter removeTarget:targetToRemove];
}

- (void)removeAllTargets;
{
    [_terminalFilter removeAllTargets];
}

- (NSArray *)targets;
{
    return [_terminalFilter targets];
}

- (void)setFrameProcessingCompletionBlock:(void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
{
    [_terminalFilter setFrameProcessingCompletionBlock:frameProcessingCompletionBlock];
}

- (void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
{
    return [_terminalFilter frameProcessingCompletionBlock];
}

#pragma mark -
#pragma mark GPUImageInput protocol

// Input-side messages fan out to every initial filter, skipping the one
// marked to be ignored for updates.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        if (currentFilter != self.inputFilterToIgnoreForUpdates)
        {
            [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex];
        }
    }
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setInputFramebuffer:newInputFramebuffer atIndex:textureIndex];
    }
}

// Deliberately returns 0; the per-initial-filter lookup remains disabled below.
- (NSInteger)nextAvailableTextureIndex;
{
//    if ([_initialFilters count] > 0)
//    {
//        return [[_initialFilters objectAtIndex:0] nextAvailableTextureIndex];
//    }

    return 0;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setInputSize:newSize atIndex:textureIndex];
    }
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setInputRotation:newInputRotation atIndex:(NSInteger)textureIndex];
    }
}

// Size forcing applies to every member filter, not just the initial ones.
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)
    {
        [currentFilter forceProcessingAtSize:frameSize];
    }
}

- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)
    {
        [currentFilter forceProcessingAtSizeRespectingAspectRatio:frameSize];
    }
}

- (CGSize)maximumOutputSize;
{
    // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
    return CGSizeZero;

    /*
    if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
    {
        for (id<GPUImageInput> currentTarget in _initialFilters)
        {
            if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
            {
                cachedMaximumOutputSize = [currentTarget maximumOutputSize];
            }
        }
    }

    return cachedMaximumOutputSize;
     */
}

// End-of-stream notification propagates through the initial filters exactly once.
- (void)endProcessing;
{
    if (!isEndProcessing)
    {
        isEndProcessing = YES;

        for (id<GPUImageInput> currentTarget in _initialFilters)
        {
            [currentTarget endProcessing];
        }
    }
}

// The group wants monochrome input only when every initial filter does.
- (BOOL)wantsMonochromeInput;
{
    BOOL allInputsWantMonochromeInput = YES;
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        allInputsWantMonochromeInput = allInputsWantMonochromeInput && [currentFilter wantsMonochromeInput];
    }

    return allInputsWantMonochromeInput;
}

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setCurrentlyReceivingMonochromeInput:newValue];
    }
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFilterGroup.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,187 |
```objective-c
#import "GPUImageRGBOpeningFilter.h"
#import "GPUImageRGBErosionFilter.h"
#import "GPUImageRGBDilationFilter.h"
@implementation GPUImageRGBOpeningFilter

/// Convenience initializer: an opening with a one-pixel radius.
- (id)init;
{
    self = [self initWithRadius:1];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

/// Chains an RGB erosion into an RGB dilation of the same radius — a
/// morphological "opening" applied per color channel.
- (id)initWithRadius:(NSUInteger)radius;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // First pass: erosion.
    erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];

    // Second pass: dilation, fed by the erosion output.
    dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];

    [erosionFilter addTarget:dilationFilter];

    self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil];
    self.terminalFilter = dilationFilter;

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 217 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/** Two-input blend filter applying a hard-light blend of its two inputs
 */
@interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter
{
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 28 |
```objective-c
#import "GPUImageTwoInputFilter.h"
/** Selectively replaces a color in the first image with the second image
 */
@interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
{
    // Shader uniform locations for the key color, sensitivity, and smoothing.
    GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
}
/** The threshold sensitivity controls how similar pixels need to be colored to be replaced
 The default value is 0.3
 */
@property(readwrite, nonatomic) CGFloat thresholdSensitivity;
/** The degree of smoothing controls how gradually similar colors are replaced in the image
 The default value is 0.1
 */
@property(readwrite, nonatomic) CGFloat smoothing;
/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
 The default is green: (0.0, 1.0, 0.0).
 @param redComponent Red component of color to be replaced
 @param greenComponent Green component of color to be replaced
 @param blueComponent Blue component of color to be replaced
 */
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 242 |
```objective-c
#import "GPUImageColorPackingFilter.h"
// Vertex shader: precomputes the four diagonal-neighbor texture coordinates
// (offset by one texel in each direction) for the packing fragment shader.
NSString *const kGPUImageColorPackingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 uniform float texelWidth;
 uniform float texelHeight;

 varying vec2 upperLeftInputTextureCoordinate;
 varying vec2 upperRightInputTextureCoordinate;
 varying vec2 lowerLeftInputTextureCoordinate;
 varying vec2 lowerRightInputTextureCoordinate;

 void main()
 {
     gl_Position = position;

     upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight);
     upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight);
     lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight);
     lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight);
 }
);

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: packs the red-channel intensities of the four
// sampled neighbors into the RGBA channels of a single output pixel.
NSString *const kGPUImageColorPackingFragmentShaderString = SHADER_STRING
(
 precision lowp float;

 uniform sampler2D inputImageTexture;
 uniform mediump mat3 convolutionMatrix;

 varying highp vec2 outputTextureCoordinate;

 varying highp vec2 upperLeftInputTextureCoordinate;
 varying highp vec2 upperRightInputTextureCoordinate;
 varying highp vec2 lowerLeftInputTextureCoordinate;
 varying highp vec2 lowerRightInputTextureCoordinate;

 void main()
 {
     float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
     float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
     float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
     float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;

     gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity);
 }
);
#else
// Desktop OpenGL variant: identical packing, without ES precision qualifiers.
NSString *const kGPUImageColorPackingFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 uniform mat3 convolutionMatrix;

 varying vec2 outputTextureCoordinate;

 varying vec2 upperLeftInputTextureCoordinate;
 varying vec2 upperRightInputTextureCoordinate;
 varying vec2 lowerLeftInputTextureCoordinate;
 varying vec2 lowerRightInputTextureCoordinate;

 void main()
 {
     float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
     float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
     float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
     float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;

     gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity);
 }
);
#endif
// Packs four neighboring single-channel samples into one RGBA pixel, halving
// the output dimensions in the process (see -sizeOfFBO / -outputFrameSize).
@implementation GPUImageColorPackingFilter

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithVertexShaderFromString:kGPUImageColorPackingVertexShaderString fragmentShaderFromString:kGPUImageColorPackingFragmentShaderString]))
    {
        return nil;
    }

    texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
    texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];

    return self;
}

- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    // NOTE(review): the filterFrameSize parameter is ignored here; the texel
    // offsets are derived from inputTextureSize instead — confirm intentional.
    // 0.5 / size == half a texel at the half-resolution output.
    texelWidth = 0.5 / inputTextureSize.width;
    texelHeight = 0.5 / inputTextureSize.height;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:filterProgram];
        glUniform1f(texelWidthUniform, texelWidth);
        glUniform1f(texelHeightUniform, texelHeight);
    });
}

#pragma mark -
#pragma mark Managing the display FBOs

// The packing halves each dimension, so the FBO is a quarter of the input area
// unless a smaller maximum output size is in force.
- (CGSize)sizeOfFBO;
{
    CGSize outputSize = [self maximumOutputSize];
    if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) )
    {
        CGSize quarterSize;
        quarterSize.width = inputTextureSize.width / 2.0;
        quarterSize.height = inputTextureSize.height / 2.0;
        return quarterSize;
    }
    else
    {
        return outputSize;
    }
}

#pragma mark -
#pragma mark Rendering

// Output frame is half the input in each dimension.
- (CGSize)outputFrameSize;
{
    CGSize quarterSize;
    quarterSize.width = inputTextureSize.width / 2.0;
    quarterSize.height = inputTextureSize.height / 2.0;
    return quarterSize;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 993 |
```objective-c
#import "GPUImageRGBFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float redAdjustment;
uniform highp float greenAdjustment;
uniform highp float blueAdjustment;
void main()
{
highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);
}
);
#else
NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float redAdjustment;
uniform float greenAdjustment;
uniform float blueAdjustment;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);
}
);
#endif
@implementation GPUImageRGBFilter

@synthesize red = _red, blue = _blue, green = _green;

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the channel-scaling filter with all three adjustments at 1.0 (identity).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageRGBFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    redUniform = [filterProgram uniformIndex:@"redAdjustment"];
    greenUniform = [filterProgram uniformIndex:@"greenAdjustment"];
    blueUniform = [filterProgram uniformIndex:@"blueAdjustment"];

    self.red = 1.0;
    self.green = 1.0;
    self.blue = 1.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Each setter stores the new multiplier and pushes it to its shader uniform.
- (void)setRed:(CGFloat)red;
{
    _red = red;
    [self setFloat:red forUniform:redUniform program:filterProgram];
}

- (void)setGreen:(CGFloat)green;
{
    _green = green;
    [self setFloat:green forUniform:greenUniform program:filterProgram];
}

- (void)setBlue:(CGFloat)blue;
{
    _blue = blue;
    [self setFloat:blue forUniform:blueUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 508 |
```objective-c
#import "GPUImageFilter.h"
// Luminance fragment shader, exposed so other filters can reuse the same
// grayscale conversion pass.
extern NSString *const kGPUImageLuminanceFragmentShaderString;

/** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)
 */
@interface GPUImageGrayscaleFilter : GPUImageFilter
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 61 |
```objective-c
#import "GPUImageZoomBlurFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: a 9-tap radial blur sampled along the line from
// the fragment toward blurCenter. The tap weights
// (0.18 + 2*(0.15+0.12+0.09+0.05)) sum to 1.0, preserving brightness.
NSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 uniform highp vec2 blurCenter;
 uniform highp float blurSize;

 void main()
 {
     // TODO: Do a more intelligent scaling based on resolution here
     highp vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize;

     lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) *  0.12;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) *  0.12;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05;

     gl_FragColor = fragmentColor;
 }
);
#else
// Desktop OpenGL variant: identical taps and weights, without ES precision
// qualifiers.
NSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 uniform vec2 blurCenter;
 uniform float blurSize;

 void main()
 {
     // TODO: Do a more intelligent scaling based on resolution here
     vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize;

     vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) *  0.12;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) *  0.12;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09;
     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05;

     gl_FragColor = fragmentColor;
 }
);
#endif
// Class extension: private storage for the shader uniform locations.
@interface GPUImageZoomBlurFilter()
{
    GLint blurSizeUniform, blurCenterUniform;
}
@end
@implementation GPUImageZoomBlurFilter

@synthesize blurSize = _blurSize;
@synthesize blurCenter = _blurCenter;

#pragma mark -
#pragma mark Initialization and teardown

/// Creates the zoom-blur filter with a unit blur size centered on the image.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageZoomBlurFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    blurSizeUniform = [filterProgram uniformIndex:@"blurSize"];
    blurCenterUniform = [filterProgram uniformIndex:@"blurCenter"];

    self.blurSize = 1.0;
    self.blurCenter = CGPointMake(0.5, 0.5);

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Re-applies the stored center so it gets re-rotated for the new input orientation.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self setBlurCenter:self.blurCenter];
}

- (void)setBlurSize:(CGFloat)blurSize;
{
    _blurSize = blurSize;
    [self setFloat:blurSize forUniform:blurSizeUniform program:filterProgram];
}

- (void)setBlurCenter:(CGPoint)blurCenter;
{
    _blurCenter = blurCenter;
    [self setPoint:[self rotatedPoint:blurCenter forRotation:inputRotation]
        forUniform:blurCenterUniform
           program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,020 |
```objective-c
#import "GPUImageSwirlFilter.h"
// Adapted from the shader example here: path_to_url
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: within "radius" of "center", rotates each sample
// coordinate about the center by an angle that grows quadratically toward the
// center (percent^2 * angle * 8.0), producing the swirl. Outside the radius
// the image is sampled unchanged.
NSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 uniform highp vec2 center;
 uniform highp float radius;
 uniform highp float angle;

 void main()
 {
     highp vec2 textureCoordinateToUse = textureCoordinate;
     highp float dist = distance(center, textureCoordinate);
     if (dist < radius)
     {
         textureCoordinateToUse -= center;
         highp float percent = (radius - dist) / radius;
         highp float theta = percent * percent * angle * 8.0;
         highp float s = sin(theta);
         highp float c = cos(theta);
         textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));
         textureCoordinateToUse += center;
     }

     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
 }
);
#else
// Desktop OpenGL variant: identical math, without ES precision qualifiers.
NSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 uniform vec2 center;
 uniform float radius;
 uniform float angle;

 void main()
 {
     vec2 textureCoordinateToUse = textureCoordinate;
     float dist = distance(center, textureCoordinate);
     if (dist < radius)
     {
         textureCoordinateToUse -= center;
         float percent = (radius - dist) / radius;
         float theta = percent * percent * angle * 8.0;
         float s = sin(theta);
         float c = cos(theta);
         textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));
         textureCoordinateToUse += center;
     }

     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
 }
);
#endif
@implementation GPUImageSwirlFilter

@synthesize center = _center;
@synthesize radius = _radius;
@synthesize angle = _angle;

#pragma mark -
#pragma mark Initialization and teardown

/// Sets up the swirl shader with a half-image radius, unit angle, and a
/// center in the middle of the image.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageSwirlFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    radiusUniform = [filterProgram uniformIndex:@"radius"];
    angleUniform = [filterProgram uniformIndex:@"angle"];
    centerUniform = [filterProgram uniformIndex:@"center"];

    self.radius = 0.5;
    self.angle = 1.0;
    self.center = CGPointMake(0.5, 0.5);

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Re-applies the stored center so it gets re-rotated for the new input orientation.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self setCenter:self.center];
}

- (void)setRadius:(CGFloat)radius;
{
    _radius = radius;
    [self setFloat:radius forUniform:radiusUniform program:filterProgram];
}

- (void)setAngle:(CGFloat)angle;
{
    _angle = angle;
    [self setFloat:angle forUniform:angleUniform program:filterProgram];
}

- (void)setCenter:(CGPoint)center;
{
    _center = center;
    [self setPoint:[self rotatedPoint:center forRotation:inputRotation]
        forUniform:centerUniform
           program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 770 |
```objective-c
#import "GPUImageFilterGroup.h"
// Forward declarations keep this header free of framework #imports.
// (Fix: GPUImageGaussianBlurFilter was previously forward-declared twice.)
@class GPUImageGaussianBlurFilter;
@class GPUImageXYDerivativeFilter;
@class GPUImageGrayscaleFilter;
@class GPUImageThresholdedNonMaximumSuppressionFilter;
@class GPUImageColorPackingFilter;

//#define DEBUGFEATUREDETECTION

/** Harris corner detector

 First pass: reduce to luminance and take the derivative of the luminance texture (GPUImageXYDerivativeFilter)

 Second pass: blur the derivative (GPUImageGaussianBlurFilter)

 Third pass: apply the Harris corner detection calculation

 This is the Harris corner detector, as described in
 C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.
 */
@interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup
{
    GPUImageXYDerivativeFilter *derivativeFilter;   // pass 1: luminance derivative
    GPUImageGaussianBlurFilter *blurFilter;         // pass 2: blur of the derivative
    GPUImageFilter *harrisCornerDetectionFilter;    // pass 3: cornerness calculation
    GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
    GPUImageColorPackingFilter *colorPackingFilter;
    GLfloat *cornersArray;                          // reused buffer of detected corner coordinates
    GLubyte *rawImagePixels;                        // reused readback buffer
}

/** The radius of the underlying Gaussian blur. The default is 2.0.
 */
@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;

// This changes the dynamic range of the Harris corner detector by amplifying small cornerness values. Default is 5.0.
@property(readwrite, nonatomic) CGFloat sensitivity;

// A threshold value at which a point is recognized as being a corner after the non-maximum suppression. Default is 0.20.
@property(readwrite, nonatomic) CGFloat threshold;

// This block is called on the detection of new corner points, usually on every processed frame. A C array containing normalized coordinates in X, Y pairs is passed in, along with a count of the number of corners detected and the current timestamp of the video frame
@property(nonatomic, copy) void(^cornersDetectedBlock)(GLfloat* cornerArray, NSUInteger cornersDetected, CMTime frameTime);

// These images are only enabled when built with DEBUGFEATUREDETECTION defined, and are used to examine the intermediate states of the feature detector
@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;

// Initialization and teardown
- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 512 |
```objective-c
#import "GPUImageOverlayBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Fragment shader implementing the "overlay" compositing mode with alpha
// handling. For each channel the shader multiplies (darkens) when the doubled
// base value is below the base alpha, and screens (lightens) otherwise; the
// extra (1.0 - alpha) terms composite un-premultiplied coverage from each input.
NSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);

     // Red channel: multiply branch below mid-point, screen branch above.
     mediump float ra;
     if (2.0 * base.r < base.a) {
         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     } else {
         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     }

     // Green channel: same branch structure as red.
     mediump float ga;
     if (2.0 * base.g < base.a) {
         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     } else {
         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     }

     // Blue channel: same branch structure as red.
     mediump float ba;
     if (2.0 * base.b < base.a) {
         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     } else {
         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     }

     // Output alpha is forced to fully opaque.
     gl_FragColor = vec4(ra, ga, ba, 1.0);
 }
);
#else
// Desktop (OpenGL) variant: identical math, without ES precision qualifiers.
NSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);

     float ra;
     if (2.0 * base.r < base.a) {
         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     } else {
         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
     }

     float ga;
     if (2.0 * base.g < base.a) {
         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     } else {
         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
     }

     float ba;
     if (2.0 * base.b < base.a) {
         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     } else {
         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
     }

     gl_FragColor = vec4(ra, ga, ba, 1.0);
 }
);
#endif
@implementation GPUImageOverlayBlendFilter

/// Configures the two-input filter with the overlay-blend fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageOverlayBlendFragmentShaderString];

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 992 |
```objective-c
#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
@implementation GPUImageDirectionalSobelEdgeDetectionFilter
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Fragment shader that samples the 3x3 neighborhood (coordinates supplied by
// the vertex shader of the enclosing 3x3 texture-sampling filter), computes
// the horizontal and vertical Sobel responses from the red channel, and
// outputs: R = gradient magnitude, G/B = quantized gradient direction snapped
// to the nearest axis or diagonal and remapped from [-1, 1] into [0, 1].
NSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING
(
 precision mediump float;

 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;

 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;

 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;

     // Standard 3x3 Sobel kernels applied to the sampled intensities.
     vec2 gradientDirection;
     gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
     gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;

     float gradientMagnitude = length(gradientDirection);
     vec2 normalizedDirection = normalize(gradientDirection);
     normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away
     normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0

     gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0);
 }
);
#else
// Desktop (OpenGL) variant: identical math, without the ES precision declaration.
NSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;

 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;

 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;

     vec2 gradientDirection;
     gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
     gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;

     float gradientMagnitude = length(gradientDirection);
     vec2 normalizedDirection = normalize(gradientDirection);
     normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away
     normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0

     gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0);
 }
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
/// Configures the 3x3 texture-sampling filter with the directional Sobel fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString];

    return self;
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,041 |
```objective-c
#import "GPUImageFilter.h"
/** Creates a pinch distortion of the image
*/
@interface GPUImagePinchDistortionFilter : GPUImageFilter
{
    GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;   // Cached uniform locations for the distortion fragment shader
}

/** The center about which to apply the distortion, with a default of (0.5, 0.5)
 */
@property(readwrite, nonatomic) CGPoint center;
/** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0
 */
@property(readwrite, nonatomic) CGFloat radius;
/** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5
 */
@property(readwrite, nonatomic) CGFloat scale;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 160 |
```objective-c
#import "GPUImageBuffer.h"
@interface GPUImageBuffer()
@end

// Passthrough element that delays its output: incoming framebuffers are held
// in a FIFO, and each new frame releases the oldest one downstream, so targets
// see video that lags the input by bufferSize frames.
@implementation GPUImageBuffer

@synthesize bufferSize = _bufferSize;

#pragma mark -
#pragma mark Initialization and teardown

// Starts with an empty FIFO and a delay of one frame.
- (id)init;
{
    if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))
    {
        return nil;
    }
    
    bufferedFramebuffers = [[NSMutableArray alloc] init];
//    [bufferedTextures addObject:[NSNumber numberWithInt:outputTexture]];
    _bufferSize = 1;
    
    return self;
}

// Drops this object's lock on every framebuffer still queued in the FIFO.
- (void)dealloc
{
    for (GPUImageFramebuffer *currentFramebuffer in bufferedFramebuffers)
    {
        [currentFramebuffer unlock];
    }
}

#pragma mark -
#pragma mark GPUImageInput

// Pushes the incoming framebuffer onto the FIFO and emits the oldest buffered
// one. Until the FIFO has filled to bufferSize, the incoming frame is passed
// straight through instead (with an extra lock taken, since it is both output
// and a FIFO entry).
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    if ([bufferedFramebuffers count] >= _bufferSize)
    {
        outputFramebuffer = [bufferedFramebuffers objectAtIndex:0];
        [bufferedFramebuffers removeObjectAtIndex:0];
    }
    else
    {
        // Nothing yet in the buffer, so don't process further until the buffer is full
        outputFramebuffer = firstInputFramebuffer;
        [firstInputFramebuffer lock];
    }
    
    [bufferedFramebuffers addObject:firstInputFramebuffer];
    
    // Need to pass along rotation information, as we're just holding on to buffered framebuffers and not rotating them ourselves
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            [currentTarget setInputRotation:inputRotation atIndex:textureIndex];
        }
    }
    
    // Let the downstream video elements see the previous frame from the buffer before rendering a new one into place
    [self informTargetsAboutNewFrameAtTime:frameTime];
    
//    [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
}

- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    // No need to render to another texture anymore, since we'll be hanging on to the textures in our buffer
}

#pragma mark -
#pragma mark Accessors

// Adjusts the frame delay. Shrinking unlocks and discards the newest queued
// framebuffers; NOTE: the growth path below is currently a stub (see TODO), so
// only _bufferSize itself changes when the value increases.
- (void)setBufferSize:(NSUInteger)newValue;
{
    if ( (newValue == _bufferSize) || (newValue < 1) )
    {
        return;
    }
    
    if (newValue > _bufferSize)
    {
        NSUInteger texturesToAdd = newValue - _bufferSize;
        for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToAdd; currentTextureIndex++)
        {
            // TODO: Deal with the growth of the size of the buffer by rotating framebuffers, no textures
        }
    }
    else
    {
        NSUInteger texturesToRemove = _bufferSize - newValue;
        for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToRemove; currentTextureIndex++)
        {
            GPUImageFramebuffer *lastFramebuffer = [bufferedFramebuffers lastObject];
            [bufferedFramebuffers removeObjectAtIndex:([bufferedFramebuffers count] - 1)];
            
            [lastFramebuffer unlock];
            lastFramebuffer = nil;
        }
    }
    
    _bufferSize = newValue;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBuffer.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 701 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input blend filter (luminosity mode); the blend equation lives in the matching .m file.
@interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 27 |
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageRGBErosionFilter;
@class GPUImageRGBDilationFilter;
// A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius.
// This helps to filter out smaller bright elements.
@interface GPUImageRGBOpeningFilter : GPUImageFilterGroup
{
    GPUImageRGBErosionFilter *erosionFilter;    // First stage: per-channel erosion
    GPUImageRGBDilationFilter *dilationFilter;  // Second stage: per-channel dilation of the same radius
}

// The radius selects which erosion/dilation kernel sizes the group is built with.
- (id)initWithRadius:(NSUInteger)radius;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 114 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input blend filter (screen mode); the blend equation lives in the matching .m file.
@interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter
{
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 27 |
```objective-c
#import "GPUImageFilter.h"
// Crops the incoming image to a normalized sub-rectangle of the input.
@interface GPUImageCropFilter : GPUImageFilter
{
    GLfloat cropTextureCoordinates[8];  // Four (u, v) pairs selecting the cropped region of the input texture
}

// The crop region is the rectangle within the image to crop. It is normalized to a coordinate space from 0.0 to 1.0, with 0.0, 0.0 being the upper left corner of the image
@property(readwrite, nonatomic) CGRect cropRegion;

// Initialization and teardown
- (id)initWithCropRegion:(CGRect)newCropRegion;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCropFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 107 |
```objective-c
#import "GPUImageVoronoiConsumerFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Fragment shader that consumes a Voronoi map: texture 2 holds, per pixel, a
// texture coordinate packed into its RGB channels; that coordinate is decoded
// and used to look up the final color in texture 1.
NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING
(
 precision highp float;
 
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform vec2 size;
 varying vec2 textureCoordinate;
 
 // Decodes a packed coordinate: the blue channel carries coarse offsets
 // (xoff = z mod 8, yoff = z / 8), red and green the fine position; the sum
 // is normalized by the point-texture size.
 vec2 getCoordFromColor(vec4 color)
{
    float z = color.z * 256.0;
    float yoff = floor(z / 8.0);
    float xoff = mod(z, 8.0);
    float x = color.x*256.0 + xoff*256.0;
    float y = color.y*256.0 + yoff*256.0;
    return vec2(x,y) / size;
}
 
 void main(void) {
     vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);
     vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));
     gl_FragColor = color;
 }
);
#else
// Desktop (OpenGL) variant: identical math, without the ES precision declaration.
NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform vec2 size;
 varying vec2 textureCoordinate;
 
 vec2 getCoordFromColor(vec4 color)
{
    float z = color.z * 256.0;
    float yoff = floor(z / 8.0);
    float xoff = mod(z, 8.0);
    float x = color.x*256.0 + xoff*256.0;
    float y = color.y*256.0 + yoff*256.0;
    return vec2(x,y) / size;
}
 
 void main(void)
 {
     vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);
     vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));
     gl_FragColor = color;
 }
);
#endif
@implementation GPUImageVoronoiConsumerFilter

@synthesize sizeInPixels = _sizeInPixels;

/// Configures the two-input filter with the Voronoi lookup shader and caches
/// the location of the "size" uniform.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageVoronoiConsumerFragmentShaderString]))
    {
        return nil;
    }
    
    sizeUniform = [filterProgram uniformIndex:@"size"];
    
    return self;
}

/// Stores the Voronoi point-texture dimensions and pushes them to the shader.
/// The texture must be square and a power of two; invalid sizes are logged and
/// the uniform is left unchanged (the stored value still updates).
-(void)setSizeInPixels:(CGSize)sizeInPixels {
    _sizeInPixels = sizeInPixels;
    
    //validate that it's a power of 2 and square
    float width = log2(sizeInPixels.width);
    float height = log2(sizeInPixels.height);
    
    if (width != height) {
        NSLog(@"Voronoi point texture must be square");
        return;
    }
    if (width != floor(width) || height != floor(height)) {
        NSLog(@"Voronoi point texture must be a power of 2. Texture size %f, %f", sizeInPixels.width, sizeInPixels.height);
        return;
    }
    // Route the update through GPUImageFilter's uniform helper instead of a
    // bare glUniform2f: the helper makes the GL context current and applies
    // the value with this filter's program active, whereas a direct call from
    // an arbitrary thread could target the wrong program or no context at all.
    [self setSize:_sizeInPixels forUniform:sizeUniform program:filterProgram];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 665 |
```objective-c
#import "GPUImageTransformFilter.h"
// Vertex shader that applies the caller-supplied 4x4 transform and then an
// orthographic matrix (used to compensate for the render target's aspect
// ratio) to each vertex; texture coordinates pass through untouched.
NSString *const kGPUImageTransformVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 
 uniform mat4 transformMatrix;
 uniform mat4 orthographicMatrix;
 
 varying vec2 textureCoordinate;
 
 void main()
 {
     gl_Position = transformMatrix * vec4(position.xyz, 1.0) * orthographicMatrix;
     textureCoordinate = inputTextureCoordinate.xy;
 }
);
@implementation GPUImageTransformFilter
@synthesize affineTransform;
@synthesize transform3D = _transform3D;
@synthesize ignoreAspectRatio = _ignoreAspectRatio;
@synthesize anchorTopLeft = _anchorTopLeft;
#pragma mark -
#pragma mark Initialization and teardown
// Pairs the transforming vertex shader with the passthrough fragment shader,
// caches both matrix uniform locations, and starts from the identity transform.
- (id)init;
{
    if (!(self = [super initWithVertexShaderFromString:kGPUImageTransformVertexShaderString fragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))
    {
        return nil;
    }
    
    transformMatrixUniform = [filterProgram uniformIndex:@"transformMatrix"];
    orthographicMatrixUniform = [filterProgram uniformIndex:@"orthographicMatrix"];
    
    self.transform3D = CATransform3DIdentity;
    
    return self;
}
#pragma mark -
#pragma mark Conversion from matrix formats
// Builds an orthographic projection for the given clip volume into a 16-float
// array. The translation terms land at indices 3, 7 and 11 — NOTE(review):
// this is the transpose of the usual column-major GL layout, and appears to be
// deliberate to match the shader's `position * orthographicMatrix` ordering;
// confirm before rearranging. When anchoring to the top-left, the scale is
// doubled and the translation pinned to (-1, -1) so the unit square maps from
// the corner instead of the center.
- (void)loadOrthoMatrix:(GLfloat *)matrix left:(GLfloat)left right:(GLfloat)right bottom:(GLfloat)bottom top:(GLfloat)top near:(GLfloat)near far:(GLfloat)far;
{
    GLfloat r_l = right - left;
    GLfloat t_b = top - bottom;
    GLfloat f_n = far - near;
    GLfloat tx = - (right + left) / (right - left);
    GLfloat ty = - (top + bottom) / (top - bottom);
    GLfloat tz = - (far + near) / (far - near);
    
    float scale = 2.0f;
    if (_anchorTopLeft)
    {
        scale = 4.0f;
        tx=-1.0f;
        ty=-1.0f;
    }
    
    matrix[0] = scale / r_l;
    matrix[1] = 0.0f;
    matrix[2] = 0.0f;
    matrix[3] = tx;
    
    matrix[4] = 0.0f;
    matrix[5] = scale / t_b;
    matrix[6] = 0.0f;
    matrix[7] = ty;
    
    matrix[8] = 0.0f;
    matrix[9] = 0.0f;
    matrix[10] = scale / f_n;
    matrix[11] = tz;
    
    matrix[12] = 0.0f;
    matrix[13] = 0.0f;
    matrix[14] = 0.0f;
    matrix[15] = 1.0f;
}
//- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GLfloat *)matrix;
//{
// // struct CATransform3D
// // {
// // CGFloat m11, m12, m13, m14;
// // CGFloat m21, m22, m23, m24;
// // CGFloat m31, m32, m33, m34;
// // CGFloat m41, m42, m43, m44;
// // };
//
// matrix[0] = (GLfloat)transform3D->m11;
// matrix[1] = (GLfloat)transform3D->m12;
// matrix[2] = (GLfloat)transform3D->m13;
// matrix[3] = (GLfloat)transform3D->m14;
// matrix[4] = (GLfloat)transform3D->m21;
// matrix[5] = (GLfloat)transform3D->m22;
// matrix[6] = (GLfloat)transform3D->m23;
// matrix[7] = (GLfloat)transform3D->m24;
// matrix[8] = (GLfloat)transform3D->m31;
// matrix[9] = (GLfloat)transform3D->m32;
// matrix[10] = (GLfloat)transform3D->m33;
// matrix[11] = (GLfloat)transform3D->m34;
// matrix[12] = (GLfloat)transform3D->m41;
// matrix[13] = (GLfloat)transform3D->m42;
// matrix[14] = (GLfloat)transform3D->m43;
// matrix[15] = (GLfloat)transform3D->m44;
//}
// Narrows a CATransform3D (sixteen contiguous CGFloat fields, m11 through m44)
// into a GLfloat 4x4 matrix, element by element in declaration order.
- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GPUMatrix4x4 *)matrix;
{
    const CGFloat *sourceValues = (const CGFloat *)transform3D;
    GLfloat *destinationValues = (GLfloat *)matrix;
    
    for (NSUInteger currentIndex = 0; currentIndex < 16; currentIndex++)
    {
        destinationValues[currentIndex] = (GLfloat)sourceValues[currentIndex];
    }
}
#pragma mark -
#pragma mark GPUImageInput
// Renders the transformed quad. Four vertex sets cover the combinations of
// the two flags: aspect-corrected vs. unit-square geometry, and centered vs.
// top-left-anchored coordinates. The aspect-corrected sets scale the quad's Y
// extent by height/width so the orthographic matrix can restore proportions.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    CGSize currentFBOSize = [self sizeOfFBO];
    CGFloat normalizedHeight = currentFBOSize.height / currentFBOSize.width;

    // Centered, aspect-corrected quad.
    GLfloat adjustedVertices[] = {
        -1.0f, -normalizedHeight,
         1.0f, -normalizedHeight,
        -1.0f,  normalizedHeight,
         1.0f,  normalizedHeight,
    };
    // Centered unit square (aspect ratio ignored).
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    // Top-left-anchored, aspect-corrected quad.
    GLfloat adjustedVerticesAnchorTL[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f,  normalizedHeight,
        1.0f,  normalizedHeight,
    };
    // Top-left-anchored unit square (aspect ratio ignored).
    static const GLfloat squareVerticesAnchorTL[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f,  1.0f,
        1.0f,  1.0f,
    };

    if (_ignoreAspectRatio)
    {
        if (_anchorTopLeft)
        {
            [self renderToTextureWithVertices:squareVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
        }
        else
        {
            [self renderToTextureWithVertices:squareVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
        }
    }
    else
    {
        if (_anchorTopLeft)
        {
            [self renderToTextureWithVertices:adjustedVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
        }
        else
        {
            [self renderToTextureWithVertices:adjustedVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
        }
    }

    [self informTargetsAboutNewFrameAtTime:frameTime];
}
// Rebuilds the aspect-corrected orthographic matrix for a new render size and
// uploads it; a no-op when the aspect ratio is being ignored (that case is
// handled by setIgnoreAspectRatio:).
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    if (!_ignoreAspectRatio)
    {
        [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * filterFrameSize.height / filterFrameSize.width) top:(1.0 * filterFrameSize.height / filterFrameSize.width) near:-1.0 far:1.0];
        //     [self loadOrthoMatrix:orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) top:(1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) near:-2.0 far:2.0];
        
        [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram];
    }
}
#pragma mark -
#pragma mark Accessors
// 2D transforms are stored by promoting them to the equivalent 3D transform.
- (void)setAffineTransform:(CGAffineTransform)newValue;
{
    self.transform3D = CATransform3DMakeAffineTransform(newValue);
}

- (CGAffineTransform)affineTransform;
{
    return CATransform3DGetAffineTransform(self.transform3D);
}

// Converts the CATransform3D to a GLfloat 4x4 matrix and pushes it to the
// vertex shader's transformMatrix uniform.
- (void)setTransform3D:(CATransform3D)newValue;
{
    _transform3D = newValue;
    
    GPUMatrix4x4 temporaryMatrix;
    
    [self convert3DTransform:&_transform3D toMatrix:&temporaryMatrix];
    [self setMatrix4f:temporaryMatrix forUniform:transformMatrixUniform program:filterProgram];
}

// Toggling aspect handling rebuilds the orthographic matrix: a plain unit
// cube when the ratio is ignored, or the aspect-corrected version otherwise.
- (void)setIgnoreAspectRatio:(BOOL)newValue;
{
    _ignoreAspectRatio = newValue;
    
    if (_ignoreAspectRatio)
    {
        [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:-1.0 top:1.0 near:-1.0 far:1.0];
        [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram];
    }
    else
    {
        [self setupFilterForSize:[self sizeOfFBO]];
    }
}

// Re-runs the aspect-ratio setup so the new anchor affects the matrices.
- (void)setAnchorTopLeft:(BOOL)newValue
{
    _anchorTopLeft = newValue;
    [self setIgnoreAspectRatio:_ignoreAspectRatio];
}
@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTransformFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 2,340 |
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input blend filter (multiply mode); the blend equation lives in the matching .m file.
@interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter
{
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 27 |
```objective-c
#import "GPUImageHueFilter.h"
// Adapted from a Stack Overflow hue-rotation GLSL example (original URL removed from this copy); see that discussion for the derivation of the YIQ conversion constants below.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Fragment shader that rotates hue in YIQ space: convert RGB -> YIQ, rotate
// the (I, Q) chroma vector by -hueAdjust radians, then convert back to RGB.
NSString *const kGPUImageHueFragmentShaderString = SHADER_STRING
(
 precision highp float;
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform mediump float hueAdjust;
 // RGB -> YIQ basis vectors (luma plus the two chroma axes).
 const highp vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);
 const highp vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0);
 const highp vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0);
 
 // YIQ -> RGB basis vectors (inverse conversion).
 const highp vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0);
 const highp vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0);
 const highp vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0);
 
 void main ()
 {
     // Sample the input pixel
     highp vec4 color = texture2D(inputImageTexture, textureCoordinate);
     
     // Convert to YIQ
     highp float YPrime = dot (color, kRGBToYPrime);
     highp float I = dot (color, kRGBToI);
     highp float Q = dot (color, kRGBToQ);
     
     // Calculate the hue and chroma
     highp float hue = atan (Q, I);
     highp float chroma = sqrt (I * I + Q * Q);
     
     // Make the user's adjustments
     hue += (-hueAdjust); //why negative rotation?
     
     // Convert back to YIQ
     Q = chroma * sin (hue);
     I = chroma * cos (hue);
     
     // Convert back to RGB
     highp vec4 yIQ = vec4 (YPrime, I, Q, 0.0);
     color.r = dot (yIQ, kYIQToR);
     color.g = dot (yIQ, kYIQToG);
     color.b = dot (yIQ, kYIQToB);
     
     // Save the result
     gl_FragColor = color;
 }
);
#else
// Desktop (OpenGL) variant: identical math, without ES precision qualifiers.
NSString *const kGPUImageHueFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 uniform float hueAdjust;
 const vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);
 const vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0);
 const vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0);
 
 const vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0);
 const vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0);
 const vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0);
 
 void main ()
 {
     // Sample the input pixel
     vec4 color = texture2D(inputImageTexture, textureCoordinate);
     
     // Convert to YIQ
     float YPrime = dot (color, kRGBToYPrime);
     float I = dot (color, kRGBToI);
     float Q = dot (color, kRGBToQ);
     
     // Calculate the hue and chroma
     float hue = atan (Q, I);
     float chroma = sqrt (I * I + Q * Q);
     
     // Make the user's adjustments
     hue += (-hueAdjust); //why negative rotation?
     
     // Convert back to YIQ
     Q = chroma * sin (hue);
     I = chroma * cos (hue);
     
     // Convert back to RGB
     vec4 yIQ = vec4 (YPrime, I, Q, 0.0);
     color.r = dot (yIQ, kYIQToR);
     color.g = dot (yIQ, kYIQToG);
     color.b = dot (yIQ, kYIQToB);
     
     // Save the result
     gl_FragColor = color;
 }
);
#endif
@implementation GPUImageHueFilter
@synthesize hue;

// Caches the hueAdjust uniform location and applies the default rotation of
// 90 degrees via the setter below.
- (id)init
{
    if(! (self = [super initWithFragmentShaderFromString:kGPUImageHueFragmentShaderString]) )
    {
        return nil;
    }
    
    hueAdjustUniform = [filterProgram uniformIndex:@"hueAdjust"];
    self.hue = 90;
    
    return self;
}

// Accepts the hue rotation in DEGREES, wraps it to a single revolution, and
// stores/pushes the value in RADIANS — so reading the `hue` property back
// returns radians, not the degrees that were set.
- (void)setHue:(CGFloat)newHue
{
    // Convert degrees to radians for hue rotation
    // NOTE(review): fmodf works in single precision although newHue is a
    // CGFloat (double on 64-bit); harmless for typical hue values, but confirm
    // if exact wrapping of large inputs matters.
    hue = fmodf(newHue, 360.0) * M_PI/180;
    [self setFloat:hue forUniform:hueAdjustUniform program:filterProgram];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHueFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,242 |
```objective-c
#import "GPUImageNonMaximumSuppressionFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Fragment shader that keeps a pixel only when its red-channel value is the
// maximum of its 3x3 neighborhood, and zeroes it otherwise. The `multiplier`
// term breaks ties against the already-scanned (left and upper) neighbors so
// plateaus produce a single surviving point, while `maxValue` covers the
// remaining (right and lower) neighbors.
NSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 
 varying highp vec2 textureCoordinate;
 varying highp vec2 leftTextureCoordinate;
 varying highp vec2 rightTextureCoordinate;
 
 varying highp vec2 topTextureCoordinate;
 varying highp vec2 topLeftTextureCoordinate;
 varying highp vec2 topRightTextureCoordinate;
 
 varying highp vec2 bottomTextureCoordinate;
 varying highp vec2 bottomLeftTextureCoordinate;
 varying highp vec2 bottomRightTextureCoordinate;
 
 void main()
 {
     lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
     lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
     lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
     lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
     lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
     lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
     lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
     lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
     lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
     
     // Use a tiebreaker for pixels to the left and immediately above this one
     lowp float multiplier = 1.0 - step(centerColor.r, topColor);
     multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
     multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
     multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
     
     lowp float maxValue = max(centerColor.r, bottomColor);
     maxValue = max(maxValue, bottomRightColor);
     maxValue = max(maxValue, rightColor);
     maxValue = max(maxValue, topRightColor);
     
     gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0);
 }
);
#else
// Desktop (OpenGL) variant: identical math, without ES precision qualifiers.
NSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 
 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 
 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;
 
 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;
 
 void main()
 {
     float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
     float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
     float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
     float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
     float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
     float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
     float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
     float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
     
     // Use a tiebreaker for pixels to the left and immediately above this one
     float multiplier = 1.0 - step(centerColor.r, topColor);
     multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
     multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
     multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
     
     float maxValue = max(centerColor.r, bottomColor);
     maxValue = max(maxValue, bottomRightColor);
     maxValue = max(maxValue, rightColor);
     maxValue = max(maxValue, topRightColor);
     
     gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0);
 }
);
#endif
@implementation GPUImageNonMaximumSuppressionFilter

#pragma mark -
#pragma mark Initialization and teardown

/// Configures the 3x3 texture-sampling filter with the non-maximum-suppression fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageNonMaximumSuppressionFragmentShaderString];

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 996 |
```objective-c
#import "GPUImageContext.h"
#import "GPUImageFramebuffer.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#else
// For now, just redefine this on the Mac
// Mirrors UIKit's UIImageOrientation so Mac builds can share orientation-handling code.
typedef NS_ENUM(NSInteger, UIImageOrientation) {
    UIImageOrientationUp,            // default orientation
    UIImageOrientationDown,          // 180 deg rotation
    UIImageOrientationLeft,          // 90 deg CCW
    UIImageOrientationRight,         // 90 deg CW
    UIImageOrientationUpMirrored,    // as above but image mirrored along other axis. horizontal flip
    UIImageOrientationDownMirrored,  // horizontal flip
    UIImageOrientationLeftMirrored,  // vertical flip
    UIImageOrientationRightMirrored, // vertical flip
};
#endif
// Queue-dispatch helpers (implemented elsewhere); per their names, they run a
// block synchronously or asynchronously on the main queue, the shared video
// processing queue, or a specific context's queue.
void runOnMainQueueWithoutDeadlocking(void (^block)(void));
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));
void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
// Debug helper: reports available memory, labeled with `tag` for identification.
void reportAvailableMemoryForGPUImage(NSString *tag);
@class GPUImageMovieWriter;
/** GPUImage's base source object
Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:
- GPUImageVideoCamera (for live video from an iOS camera)
- GPUImageStillCamera (for taking photos with the camera)
- GPUImagePicture (for still images)
- GPUImageMovie (for movies)
Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
*/
@interface GPUImageOutput : NSObject
{
    // Framebuffer holding this source's most recently rendered output.
    GPUImageFramebuffer *outputFramebuffer;

    // Parallel arrays: registered downstream targets and, per target, which
    // texture input index of that target this output feeds.
    NSMutableArray *targets, *targetTextureIndices;

    CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;

    BOOL overrideInputSize;

    BOOL allTargetsWantMonochromeData;
    // Set by -useNextFrameForImageCapture; the next rendered frame is retained
    // for the image-capture methods below.
    BOOL usingNextFrameForImageCapture;
}

// NOTE(review): presumably selects higher-quality (smoothed) scaling of the
// output texture — confirm against the implementation in GPUImageOutput.m.
@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
// Movie writer that should receive the audio track while this output runs.
@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
// A single target excluded from new-frame updates (unretained to avoid a cycle).
@property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;
// Invoked after each frame is processed, with this output and the frame time.
@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
// When NO, this output stops feeding frames to its targets.
@property(nonatomic) BOOL enabled;
// Texture parameters (filtering, wrap, format) applied to the output framebuffer.
@property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;

/// @name Managing targets

// Hands this output's framebuffer to a target at the given texture input index.
- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
- (GPUImageFramebuffer *)framebufferForOutput;
- (void)removeOutputFramebuffer;
- (void)notifyTargetsAboutNewOutputTexture;

/** Returns an array of the current targets.
 */
- (NSArray*)targets;

/** Adds a target to receive notifications when new frames are available.

 The target will be asked for its next available texture.

 See [GPUImageInput newFrameReadyAtTime:]

 @param newTarget Target to be added
 */
- (void)addTarget:(id<GPUImageInput>)newTarget;

/** Adds a target to receive notifications when new frames are available.

 See [GPUImageInput newFrameReadyAtTime:]

 @param newTarget Target to be added
 */
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;

/** Removes a target. The target will no longer receive notifications when new frames are available.

 @param targetToRemove Target to be removed
 */
- (void)removeTarget:(id<GPUImageInput>)targetToRemove;

/** Removes all targets.
 */
- (void)removeAllTargets;

/// @name Manage the output texture

// Forces rendering at a fixed size regardless of the incoming frame size.
- (void)forceProcessingAtSize:(CGSize)frameSize;
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;

/// @name Still image processing

// Must be called before -processImage / rendering for the capture methods
// below to return a non-nil image (see the note further down).
- (void)useNextFrameForImageCapture;
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;

// Platform-specific image output methods
// If you're trying to use these methods, remember that you need to set -useNextFrameForImageCapture before running -processImage or running video and calling any of these methods, or you will get a nil image
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- (UIImage *)imageFromCurrentFramebuffer;
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;
#else
- (NSImage *)imageFromCurrentFramebuffer;
- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;
#endif

- (BOOL)providesMonochromeOutput;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOutput.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 1,095 |
```objective-c
#import "GPUImageToneCurveFilter.h"
#pragma mark -
#pragma mark GPUImageACVFile Helper
// GPUImageACVFile
//
// ACV File format Parser
// Please refer to path_to_url#50577411_pgfId-1056330
//
// Parser for Adobe Photoshop .acv curve files (big-endian binary format:
// version word, curve count, then per-curve point lists).
@interface GPUImageACVFile : NSObject{
    short version;      // ACV format version word from the file header
    short totalCurves;  // number of curves declared in the file header
    NSArray *rgbCompositeCurvePoints;
    NSArray *redCurvePoints;
    NSArray *greenCurvePoints;
    NSArray *blueCurvePoints;
}

// Curve control points normalized to [0, 1]; nil if parsing failed.
@property(strong,nonatomic) NSArray *rgbCompositeCurvePoints;
@property(strong,nonatomic) NSArray *redCurvePoints;
@property(strong,nonatomic) NSArray *greenCurvePoints;
@property(strong,nonatomic) NSArray *blueCurvePoints;

// Parses the raw bytes of an .acv file; on bad input the curve arrays stay nil.
- (id) initWithACVFileData:(NSData*)data;

// Reads a big-endian 16-bit integer from the given (possibly unaligned) bytes.
unsigned short int16WithBytes(Byte* bytes);
@end
@implementation GPUImageACVFile

@synthesize rgbCompositeCurvePoints, redCurvePoints, greenCurvePoints, blueCurvePoints;

/// Parses the binary contents of an Adobe .acv curves file.
/// Layout: 2-byte version, 2-byte curve count, then for each curve a 2-byte
/// point count followed by (output, input) pairs of big-endian shorts in 0-255.
/// Points are stored normalized to [0, 1].
///
/// Fixes over the previous version: the parser now validates buffer bounds
/// before every read (the original only rejected zero-length data, so a
/// truncated file caused out-of-bounds reads), and it verifies that at least
/// four curves were parsed before indexing them (the original crashed with a
/// range exception on files declaring fewer curves). Valid files parse
/// identically to before.
- (id) initWithACVFileData:(NSData *)data {
    self = [super init];
    if (self != nil)
    {
        // Header alone needs 4 bytes (version + curve count).
        if (data.length < 4)
        {
            NSLog(@"failed to init ACVFile with data:%@", data);
            return self;
        }

        Byte* rawBytes = (Byte*) [data bytes];
        Byte* endOfBytes = rawBytes + data.length;
        version     = int16WithBytes(rawBytes);
        rawBytes += 2;
        totalCurves = int16WithBytes(rawBytes);
        rawBytes += 2;

        NSMutableArray *curves = [NSMutableArray new];

        float pointRate = (1.0 / 255);
        // The following is the data for each curve specified by count above
        for (NSInteger curveIndex = 0; curveIndex < totalCurves; curveIndex++)
        {
            if (rawBytes + 2 > endOfBytes)
            {
                NSLog(@"ACV data truncated while reading the point count of curve %ld", (long)curveIndex);
                break;
            }
            unsigned short pointCount = int16WithBytes(rawBytes);
            rawBytes += 2;

            // Each point is two big-endian shorts (4 bytes); refuse to walk
            // past the end of the buffer on a truncated file.
            if (rawBytes + ((NSUInteger)pointCount * 4) > endOfBytes)
            {
                NSLog(@"ACV data truncated while reading the points of curve %ld", (long)curveIndex);
                break;
            }

            NSMutableArray *points = [NSMutableArray new];
            // Curve points. Each curve point is a pair of short integers where
            // the first number is the output value (vertical coordinate on the
            // Curves dialog graph) and the second is the input value. All coordinates have range 0 to 255.
            for (NSInteger pointIndex = 0; pointIndex < pointCount; pointIndex++)
            {
                unsigned short y = int16WithBytes(rawBytes);
                rawBytes += 2;
                unsigned short x = int16WithBytes(rawBytes);
                rawBytes += 2;

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
                [points addObject:[NSValue valueWithCGSize:CGSizeMake(x * pointRate, y * pointRate)]];
#else
                [points addObject:[NSValue valueWithSize:CGSizeMake(x * pointRate, y * pointRate)]];
#endif
            }

            [curves addObject:points];
        }

        // A Photoshop curves file must supply the RGB composite curve plus the
        // three per-channel curves; bail out rather than index past the array.
        if ([curves count] < 4)
        {
            NSLog(@"failed to init ACVFile: expected at least 4 curves, got %lu", (unsigned long)[curves count]);
            return self;
        }

        rgbCompositeCurvePoints = [curves objectAtIndex:0];
        redCurvePoints = [curves objectAtIndex:1];
        greenCurvePoints = [curves objectAtIndex:2];
        blueCurvePoints = [curves objectAtIndex:3];
    }

    return self;
}

/// Reads an unaligned big-endian 16-bit value and converts it to host order.
unsigned short int16WithBytes(Byte* bytes) {
    uint16_t result;
    memcpy(&result, bytes, sizeof(result));
    return CFSwapInt16BigToHost(result);
}

@end
#pragma mark -
#pragma mark GPUImageToneCurveFilter Implementation
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Remaps each channel of the input color through a 256x1 lookup texture
// (uploaded by -updateToneCurveTexture): the r/g/b components of the fetched
// texel hold the curve-adjusted values for the corresponding channel.
NSString *const kGPUImageToneCurveFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform sampler2D toneCurveTexture;

 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     lowp float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;
     lowp float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;
     lowp float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;

     gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);
 }
);
#else
// Desktop OpenGL variant: identical logic without the ES precision qualifiers.
NSString *const kGPUImageToneCurveFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform sampler2D toneCurveTexture;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;
     float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;
     float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;

     gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);
 }
);
#endif
@interface GPUImageToneCurveFilter()
{
    GLint toneCurveTextureUniform;   // shader uniform slot for the curve lookup texture
    GLuint toneCurveTexture;         // 256x1 BGRA lookup texture, bound on GL_TEXTURE3
    GLubyte *toneCurveByteArray;     // CPU-side 256*4-byte staging buffer for the texture
    // Per-intensity signed offsets (0-255 entries) computed from the control
    // points by -getPreparedSplineCurve:.
    NSArray *_redCurve, *_greenCurve, *_blueCurve, *_rgbCompositeCurve;
}
@end
@implementation GPUImageToneCurveFilter

@synthesize rgbCompositeControlPoints = _rgbCompositeControlPoints;
@synthesize redControlPoints = _redControlPoints;
@synthesize greenControlPoints = _greenControlPoints;
@synthesize blueControlPoints = _blueControlPoints;

#pragma mark -
#pragma mark Initialization and teardown

/// Compiles the tone-curve shader and installs an identity curve
/// ((0,0), (0.5,0.5), (1,1)) on the composite and all three channel curves.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageToneCurveFragmentShaderString]))
    {
        return nil;
    }

    toneCurveTextureUniform = [filterProgram uniformIndex:@"toneCurveTexture"];

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    NSArray *defaultCurve = [NSArray arrayWithObjects:[NSValue valueWithCGPoint:CGPointMake(0.0, 0.0)], [NSValue valueWithCGPoint:CGPointMake(0.5, 0.5)], [NSValue valueWithCGPoint:CGPointMake(1.0, 1.0)], nil];
#else
    NSArray *defaultCurve = [NSArray arrayWithObjects:[NSValue valueWithPoint:NSMakePoint(0.0, 0.0)], [NSValue valueWithPoint:NSMakePoint(0.5, 0.5)], [NSValue valueWithPoint:NSMakePoint(1.0, 1.0)], nil];
#endif
    // Each setter recomputes its spline and re-uploads the lookup texture.
    [self setRgbCompositeControlPoints:defaultCurve];
    [self setRedControlPoints:defaultCurve];
    [self setGreenControlPoints:defaultCurve];
    [self setBlueControlPoints:defaultCurve];

    return self;
}

// This pulls in Adobe ACV curve files to specify the tone curve
/// Initializes the filter from raw .acv file bytes, applying the file's
/// composite and per-channel curves.
- (id)initWithACVData:(NSData *)data {
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageToneCurveFragmentShaderString]))
    {
        return nil;
    }

    toneCurveTextureUniform = [filterProgram uniformIndex:@"toneCurveTexture"];

    GPUImageACVFile *curve = [[GPUImageACVFile alloc] initWithACVFileData:data];

    [self setRgbCompositeControlPoints:curve.rgbCompositeCurvePoints];
    [self setRedControlPoints:curve.redCurvePoints];
    [self setGreenControlPoints:curve.greenCurvePoints];
    [self setBlueControlPoints:curve.blueCurvePoints];

    curve = nil;

    return self;
}

/// Convenience: loads "<curveFilename>.acv" from the main bundle.
- (id)initWithACV:(NSString*)curveFilename
{
    return [self initWithACVURL:[[NSBundle mainBundle] URLForResource:curveFilename
                                                        withExtension:@"acv"]];
}

/// Convenience: loads .acv data from an arbitrary URL.
- (id)initWithACVURL:(NSURL*)curveFileURL
{
    NSData* fileData = [NSData dataWithContentsOfURL:curveFileURL];
    return [self initWithACVData:fileData];
}

/// Replaces the current curves with those from "<curveFilename>.acv" in the
/// main bundle.
- (void)setPointsWithACV:(NSString*)curveFilename
{
    [self setPointsWithACVURL:[[NSBundle mainBundle] URLForResource:curveFilename withExtension:@"acv"]];
}

/// Replaces the current curves with those parsed from the file at the URL.
- (void)setPointsWithACVURL:(NSURL*)curveFileURL
{
    NSData* fileData = [NSData dataWithContentsOfURL:curveFileURL];
    GPUImageACVFile *curve = [[GPUImageACVFile alloc] initWithACVFileData:fileData];

    [self setRgbCompositeControlPoints:curve.rgbCompositeCurvePoints];
    [self setRedControlPoints:curve.redCurvePoints];
    [self setGreenControlPoints:curve.greenCurvePoints];
    [self setBlueControlPoints:curve.blueCurvePoints];

    curve = nil;
}

// Releases the GL lookup texture and its CPU staging buffer on the video
// processing queue (GL objects must be deleted on the GL context's queue).
- (void)dealloc
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        if (toneCurveTexture)
        {
            glDeleteTextures(1, &toneCurveTexture);
            toneCurveTexture = 0;
            free(toneCurveByteArray);
        }
    });
}

#pragma mark -
#pragma mark Curve calculation

/// Converts normalized [0,1] control points into a 256-entry array of signed
/// offsets from the identity curve, one NSNumber per intensity level 0-255:
/// sort by x, scale to 0-255, run the spline through them, pad the head with
/// 0s and the tail with 255s so all 256 levels are covered, then store each
/// level's distance from the identity diagonal (negative when the curve dips
/// below it).
- (NSArray *)getPreparedSplineCurve:(NSArray *)points
{
    if (points && [points count] > 0)
    {
        // Sort the array.
        NSArray *sortedPoints = [points sortedArrayUsingComparator:^NSComparisonResult(id a, id b) {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            float x1 = [(NSValue *)a CGPointValue].x;
            float x2 = [(NSValue *)b CGPointValue].x;
#else
            float x1 = [(NSValue *)a pointValue].x;
            float x2 = [(NSValue *)b pointValue].x;
#endif
            return x1 > x2;
        }];

        // Convert from (0, 1) to (0, 255).
        NSMutableArray *convertedPoints = [NSMutableArray arrayWithCapacity:[sortedPoints count]];
        for (int i=0; i<[points count]; i++){
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            CGPoint point = [[sortedPoints objectAtIndex:i] CGPointValue];
#else
            NSPoint point = [[sortedPoints objectAtIndex:i] pointValue];
#endif
            point.x = point.x * 255;
            point.y = point.y * 255;

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            [convertedPoints addObject:[NSValue valueWithCGPoint:point]];
#else
            [convertedPoints addObject:[NSValue valueWithPoint:point]];
#endif
        }

        NSMutableArray *splinePoints = [self splineCurve:convertedPoints];

        // If we have a first point like (0.3, 0) we'll be missing some points at the beginning
        // that should be 0.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint firstSplinePoint = [[splinePoints objectAtIndex:0] CGPointValue];
#else
        NSPoint firstSplinePoint = [[splinePoints objectAtIndex:0] pointValue];
#endif
        if (firstSplinePoint.x > 0) {
            for (int i=firstSplinePoint.x; i >= 0; i--) {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
                CGPoint newCGPoint = CGPointMake(i, 0);
                [splinePoints insertObject:[NSValue valueWithCGPoint:newCGPoint] atIndex:0];
#else
                NSPoint newNSPoint = NSMakePoint(i, 0);
                [splinePoints insertObject:[NSValue valueWithPoint:newNSPoint] atIndex:0];
#endif
            }
        }

        // Insert points similarly at the end, if necessary.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint lastSplinePoint = [[splinePoints lastObject] CGPointValue];

        if (lastSplinePoint.x < 255) {
            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
                CGPoint newCGPoint = CGPointMake(i, 255);
                [splinePoints addObject:[NSValue valueWithCGPoint:newCGPoint]];
            }
        }
#else
        NSPoint lastSplinePoint = [[splinePoints lastObject] pointValue];

        if (lastSplinePoint.x < 255) {
            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
                NSPoint newNSPoint = NSMakePoint(i, 255);
                [splinePoints addObject:[NSValue valueWithPoint:newNSPoint]];
            }
        }
#endif

        // Prepare the spline points.
        NSMutableArray *preparedSplinePoints = [NSMutableArray arrayWithCapacity:[splinePoints count]];
        for (int i=0; i<[splinePoints count]; i++)
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            CGPoint newPoint = [[splinePoints objectAtIndex:i] CGPointValue];
#else
            NSPoint newPoint = [[splinePoints objectAtIndex:i] pointValue];
#endif
            // origPoint lies on the identity diagonal (y == x), so distance is
            // simply how far the curve deviates from identity at this level.
            CGPoint origPoint = CGPointMake(newPoint.x, newPoint.x);

            float distance = sqrt(pow((origPoint.x - newPoint.x), 2.0) + pow((origPoint.y - newPoint.y), 2.0));

            if (origPoint.y > newPoint.y)
            {
                distance = -distance;
            }

            [preparedSplinePoints addObject:[NSNumber numberWithFloat:distance]];
        }

        return preparedSplinePoints;
    }

    return nil;
}

/// Evaluates the cubic spline through the control points at every integer x
/// between consecutive control points, clamping y to [0, 255]. Returns the
/// interpolated point list (CGPoint/NSPoint values).
- (NSMutableArray *)splineCurve:(NSArray *)points
{
    NSMutableArray *sdA = [self secondDerivative:points];

    // [points count] is equal to [sdA count]
    NSInteger n = [sdA count];
    if (n < 1)
    {
        return nil;
    }
    double sd[n];

    // From NSMutableArray to sd[n];
    for (int i=0; i<n; i++)
    {
        sd[i] = [[sdA objectAtIndex:i] doubleValue];
    }

    NSMutableArray *output = [NSMutableArray arrayWithCapacity:(n+1)];

    for(int i=0; i<n-1 ; i++)
    {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint cur = [[points objectAtIndex:i] CGPointValue];
        CGPoint next = [[points objectAtIndex:(i+1)] CGPointValue];
#else
        NSPoint cur = [[points objectAtIndex:i] pointValue];
        NSPoint next = [[points objectAtIndex:(i+1)] pointValue];
#endif
        for(int x=cur.x;x<(int)next.x;x++)
        {
            // Standard cubic-spline interpolation using the second derivatives
            // at the bracketing control points.
            double t = (double)(x-cur.x)/(next.x-cur.x);

            double a = 1-t;
            double b = t;
            double h = next.x-cur.x;

            double y= a*cur.y + b*next.y + (h*h/6)*( (a*a*a-a)*sd[i]+ (b*b*b-b)*sd[i+1] );

            if (y > 255.0)
            {
                y = 255.0;
            }
            else if (y < 0.0)
            {
                y = 0.0;
            }
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            [output addObject:[NSValue valueWithCGPoint:CGPointMake(x, y)]];
#else
            [output addObject:[NSValue valueWithPoint:NSMakePoint(x, y)]];
#endif
        }
    }

    // The above always misses the last point because the last point is the last next, so we approach but don't equal it.
    [output addObject:[points lastObject]];
    return output;
}

/// Solves the tridiagonal system for the spline's second derivatives at each
/// control point, with natural boundary conditions (zero second derivative at
/// both endpoints). Returns one NSNumber per control point.
- (NSMutableArray *)secondDerivative:(NSArray *)points
{
    const NSInteger n = [points count];
    if ((n <= 0) || (n == 1))
    {
        return nil;
    }

    double matrix[n][3];
    double result[n];
    matrix[0][1]=1;
    // What about matrix[0][1] and matrix[0][0]? Assuming 0 for now (Brad L.)
    matrix[0][0]=0;
    matrix[0][2]=0;

    for(int i=1;i<n-1;i++)
    {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint P1 = [[points objectAtIndex:(i-1)] CGPointValue];
        CGPoint P2 = [[points objectAtIndex:i] CGPointValue];
        CGPoint P3 = [[points objectAtIndex:(i+1)] CGPointValue];
#else
        NSPoint P1 = [[points objectAtIndex:(i-1)] pointValue];
        NSPoint P2 = [[points objectAtIndex:i] pointValue];
        NSPoint P3 = [[points objectAtIndex:(i+1)] pointValue];
#endif
        matrix[i][0]=(double)(P2.x-P1.x)/6;
        matrix[i][1]=(double)(P3.x-P1.x)/3;
        matrix[i][2]=(double)(P3.x-P2.x)/6;
        result[i]=(double)(P3.y-P2.y)/(P3.x-P2.x) - (double)(P2.y-P1.y)/(P2.x-P1.x);
    }

    // What about result[0] and result[n-1]? Assuming 0 for now (Brad L.)
    result[0] = 0;
    result[n-1] = 0;

    matrix[n-1][1]=1;
    // What about matrix[n-1][0] and matrix[n-1][2]? For now, assuming they are 0 (Brad L.)
    matrix[n-1][0]=0;
    matrix[n-1][2]=0;

    // solving pass1 (up->down)
    for(int i=1;i<n;i++)
    {
        double k = matrix[i][0]/matrix[i-1][1];
        matrix[i][1] -= k*matrix[i-1][2];
        matrix[i][0] = 0;
        result[i] -= k*result[i-1];
    }
    // solving pass2 (down->up)
    for(NSInteger i=n-2;i>=0;i--)
    {
        double k = matrix[i][2]/matrix[i+1][1];
        matrix[i][1] -= k*matrix[i+1][0];
        matrix[i][2] = 0;
        result[i] -= k*result[i+1];
    }

    double y2[n];
    for(int i=0;i<n;i++) y2[i]=result[i]/matrix[i][1];

    NSMutableArray *output = [NSMutableArray arrayWithCapacity:n];
    for (int i=0;i<n;i++)
    {
        [output addObject:[NSNumber numberWithDouble:y2[i]]];
    }

    return output;
}

/// Rebuilds the 256x1 BGRA lookup texture from the four prepared curves.
/// Each channel value is first offset by its own channel curve, then the
/// result is offset by the RGB composite curve, clamped to [0, 255].
- (void)updateToneCurveTexture;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        if (!toneCurveTexture)
        {
            glActiveTexture(GL_TEXTURE3);
            glGenTextures(1, &toneCurveTexture);
            glBindTexture(GL_TEXTURE_2D, toneCurveTexture);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            toneCurveByteArray = calloc(256 * 4, sizeof(GLubyte));
        }
        else
        {
            glActiveTexture(GL_TEXTURE3);
            glBindTexture(GL_TEXTURE_2D, toneCurveTexture);
        }

        if ( ([_redCurve count] >= 256) && ([_greenCurve count] >= 256) && ([_blueCurve count] >= 256) && ([_rgbCompositeCurve count] >= 256))
        {
            for (unsigned int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++)
            {
                // BGRA for upload to texture
                GLubyte b = fmin(fmax(currentCurveIndex + [[_blueCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4] = fmin(fmax(b + [[_rgbCompositeCurve objectAtIndex:b] floatValue], 0), 255);
                GLubyte g = fmin(fmax(currentCurveIndex + [[_greenCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4 + 1] = fmin(fmax(g + [[_rgbCompositeCurve objectAtIndex:g] floatValue], 0), 255);
                GLubyte r = fmin(fmax(currentCurveIndex + [[_redCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4 + 2] = fmin(fmax(r + [[_rgbCompositeCurve objectAtIndex:r] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4 + 3] = 255;
            }

            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GL_BGRA, GL_UNSIGNED_BYTE, toneCurveByteArray);
        }
    });
}

#pragma mark -
#pragma mark Rendering

/// Draws the input frame through the tone-curve shader, binding the input on
/// GL_TEXTURE2 and the curve lookup texture on GL_TEXTURE3.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        // Extra lock so the framebuffer survives until the captured image is read.
        [outputFramebuffer lock];
    }

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform, 2);

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, toneCurveTexture);
    glUniform1i(toneCurveTextureUniform, 3);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [firstInputFramebuffer unlock];

    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

#pragma mark -
#pragma mark Accessors

/// Applies the same control points to all three channel curves at once,
/// triggering only a single texture rebuild.
- (void)setRGBControlPoints:(NSArray *)points
{
    _redControlPoints = [points copy];
    _redCurve = [self getPreparedSplineCurve:_redControlPoints];

    _greenControlPoints = [points copy];
    _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];

    _blueControlPoints = [points copy];
    _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];

    [self updateToneCurveTexture];
}

// Each setter below copies the control points, recomputes the prepared spline
// offsets for its curve, and re-uploads the lookup texture.
- (void)setRgbCompositeControlPoints:(NSArray *)newValue
{
    _rgbCompositeControlPoints = [newValue copy];
    _rgbCompositeCurve = [self getPreparedSplineCurve:_rgbCompositeControlPoints];

    [self updateToneCurveTexture];
}

- (void)setRedControlPoints:(NSArray *)newValue;
{
    _redControlPoints = [newValue copy];
    _redCurve = [self getPreparedSplineCurve:_redControlPoints];

    [self updateToneCurveTexture];
}

- (void)setGreenControlPoints:(NSArray *)newValue
{
    _greenControlPoints = [newValue copy];
    _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];

    [self updateToneCurveTexture];
}

- (void)setBlueControlPoints:(NSArray *)newValue
{
    _blueControlPoints = [newValue copy];
    _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];

    [self updateToneCurveTexture];
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 5,181 |
```objective-c
#import "GPUImageVideoCamera.h"
void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress);
void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer);
// Camera source specialized for still-photo capture; each capture method runs
// the photo through the supplied filter chain and delivers the result (or an
// error) to its completion block.
@interface GPUImageStillCamera : GPUImageVideoCamera

/** The JPEG compression quality to use when capturing a photo as a JPEG.
 */
@property CGFloat jpegCompressionQuality;

// Only reliably set inside the context of the completion handler of one of the capture methods
@property (readonly) NSDictionary *currentCaptureMetadata;

// Photography controls

// Delivers the raw still-image sample buffer without filter processing.
- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block;
// Delivers the filtered photo as a UIImage; the ...withOrientation: variant
// additionally tags the result with the given image orientation.
- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
// Delivers the filtered photo as JPEG-encoded NSData (see jpegCompressionQuality).
- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
// Delivers the filtered photo as PNG-encoded NSData.
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageStillCamera.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 429 |
```objective-c
#import "GPUImageSoftLightBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Soft-light blend of two inputs with alpha compositing: the base color is
// un-premultiplied (divided by its alpha) before blending, and regions where
// either input is transparent fall back to the other input's color.
NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);

     lowp float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output
     gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a);
 }
);
#else
// Desktop OpenGL variant: identical logic without the ES precision qualifiers.
NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 base = texture2D(inputImageTexture, textureCoordinate);
     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);

     float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output
     gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a);
 }
);
#endif
@implementation GPUImageSoftLightBlendFilter

/// Sets up the two-input blend using the soft-light fragment shader above.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageSoftLightBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 453 |
```objective-c
//
// GPUImageMovieComposition.m
// Givit
//
// Created by Sean Meiners on 2013/01/25.
//
//
#import "GPUImageMovieComposition.h"
#import "GPUImageMovieWriter.h"
@implementation GPUImageMovieComposition

// NOTE(review): "compositon" is a typo, but it is spelled this way in the
// public header, so it cannot be renamed here without breaking the interface.
@synthesize compositon = _compositon;
@synthesize videoComposition = _videoComposition;
@synthesize audioMix = _audioMix;

/// Wraps an AVComposition (with optional video composition and audio mix) as
/// a GPUImage movie source, preparing the YUV conversion program up front.
- (id)initWithComposition:(AVComposition*)compositon
      andVideoComposition:(AVVideoComposition*)videoComposition
              andAudioMix:(AVAudioMix*)audioMix {
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.compositon = compositon;
    self.videoComposition = videoComposition;
    self.audioMix = audioMix;

    return self;
}

/// Builds an AVAssetReader over the stored composition. Video frames are
/// delivered as full-range 4:2:0 YpCbCr; an audio-mix output is attached only
/// when both audio tracks exist and an audio encoding target has been set.
- (AVAssetReader*)createAssetReader
{
    //NSLog(@"creating reader from composition: %@, video: %@, audio: %@ with duration: %@", _compositon, _videoComposition, _audioMix, CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, _compositon.duration)));

    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.compositon error:&error];

    NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    AVAssetReaderVideoCompositionOutput *readerVideoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:[_compositon tracksWithMediaType:AVMediaTypeVideo]
                                                                                                                                    videoSettings:outputSettings];
#if ! TARGET_IPHONE_SIMULATOR
    // NOTE(review): render scale is forced to 1.0 on device builds only —
    // presumably a workaround for a composition rendering issue; confirm.
    if( [_videoComposition isKindOfClass:[AVMutableVideoComposition class]] )
        [(AVMutableVideoComposition*)_videoComposition setRenderScale:1.0];
#endif
    readerVideoOutput.videoComposition = self.videoComposition;
    // Skip AVFoundation's defensive per-sample buffer copies.
    readerVideoOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoOutput];

    NSArray *audioTracks = [_compositon tracksWithMediaType:AVMediaTypeAudio];
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
    AVAssetReaderAudioMixOutput *readerAudioOutput = nil;

    if (shouldRecordAudioTrack)
    {
        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];

        readerAudioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
        readerAudioOutput.audioMix = self.audioMix;
        readerAudioOutput.alwaysCopiesSampleData = NO;
        [assetReader addOutput:readerAudioOutput];
    }

    return assetReader;
}

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMovieComposition.m | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 597 |
```objective-c
#import "GPUImageFilter.h"
// Pixellation filter restricted to a circular region of the image.
// NOTE(review): description inferred from the center/radius properties below —
// confirm against the fragment shader in the .m file.
@interface GPUImagePixellatePositionFilter : GPUImageFilter
{
    GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform;
}

// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;

// the center point to start pixelation in texture coordinates, default 0.5, 0.5
@property(readwrite, nonatomic) CGPoint center;

// the radius (0.0 - 1.0) in which to pixelate, default 1.0
@property(readwrite, nonatomic) CGFloat radius;

@end
``` | /content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.h | objective-c | 2016-02-02T02:51:55 | 2024-08-09T08:55:27 | WMPlayer | zhengwenming/WMPlayer | 3,272 | 155 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.