text
stringlengths 9
39.2M
| dir
stringlengths 25
226
| lang
stringclasses 163
values | created_date
timestamp[s] | updated_date
timestamp[s] | repo_name
stringclasses 751
values | repo_full_name
stringclasses 752
values | star
int64 1.01k
183k
| len_tokens
int64 1
18.5M
|
|---|---|---|---|---|---|---|---|---|
```objective-c
#import "GPUImageMonochromeFilter.h"
// Fragment shader for the monochrome effect: reduce the input to its
// luminance, overlay-blend that gray value with `filterColor`, then mix the
// result back toward the original color by `intensity`
// (0.0 = untouched input, 1.0 = fully tinted).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (explicit precision qualifiers required).
NSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING
(
precision lowp float;
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float intensity;
uniform vec3 filterColor;
// Rec. 709-style luminance weights used throughout GPUImage.
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
//desat, then apply overlay blend
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, luminanceWeighting);
lowp vec4 desat = vec4(vec3(luminance), 1.0);
//overlay blend of filterColor over the desaturated pixel, per channel
lowp vec4 outputColor = vec4(
(desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))),
(desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))),
(desat.b < 0.5 ? (2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))),
1.0
);
//which is better, or are they equal?
gl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);
}
);
#else
// Desktop OpenGL variant (no precision qualifiers); logic is identical to
// the ES shader above.
NSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float intensity;
uniform vec3 filterColor;
const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
//desat, then apply overlay blend
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, luminanceWeighting);
vec4 desat = vec4(vec3(luminance), 1.0);
//overlay blend of filterColor over the desaturated pixel, per channel
vec4 outputColor = vec4(
(desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))),
(desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))),
(desat.b < 0.5 ? (2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))),
1.0
);
//which is better, or are they equal?
gl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);
}
);
#endif
@implementation GPUImageMonochromeFilter

@synthesize intensity = _intensity;
@synthesize color = _color;

#pragma mark -
#pragma mark Initialization

// Compiles the monochrome fragment shader, caches its uniform handles, and
// seeds the defaults: full intensity with a warm sepia-like tint.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUMonochromeFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    intensityUniform = [filterProgram uniformIndex:@"intensity"];
    filterColorUniform = [filterProgram uniformIndex:@"filterColor"];

    self.intensity = 1.0;
    self.color = (GPUVector4){0.6f, 0.45f, 0.3f, 1.f};

    return self;
}

#pragma mark -
#pragma mark Accessors

// Stores the full RGBA value, but only the RGB channels are forwarded to the
// shader's vec3 `filterColor` uniform.
- (void)setColor:(GPUVector4)color;
{
    _color = color;

    [self setColorRed:color.one green:color.two blue:color.three];
}

- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
{
    GPUVector3 filterColor = {redComponent, greenComponent, blueComponent};

    [self setVec3:filterColor forUniform:filterColorUniform program:filterProgram];
}

// 0.0 passes the source through untouched; 1.0 applies the full tinted result.
- (void)setIntensity:(CGFloat)newValue;
{
    _intensity = newValue;

    [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,064
|
```objective-c
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import "GPUImageContext.h"
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
// Color-conversion matrices (BT.601 video range, BT.601 full range, BT.709)
// shared with the other capture/playback sources.
extern const GLfloat kColorConversion601[];
extern const GLfloat kColorConversion601FullRange[];
extern const GLfloat kColorConversion709[];
// Fragment shaders used for YUV-to-RGB conversion of captured frames.
extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;
extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;
// Delegate protocol that exposes raw sample buffers (e.g. for face detection).
@protocol GPUImageVideoCameraDelegate <NSObject>
@optional
// Called with each captured video sample buffer before GPUImage processes it.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
/**
 A GPUImageOutput that provides frames from either camera
*/
@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
    NSUInteger numberOfFramesCaptured;
    CGFloat totalFrameTimeDuringCapture;
    AVCaptureSession *_captureSession;
    AVCaptureDevice *_inputCamera;
    AVCaptureDevice *_microphone;
    AVCaptureDeviceInput *videoInput;
    AVCaptureVideoDataOutput *videoOutput;
    BOOL capturePaused;
    GPUImageRotationMode outputRotation, internalRotation;
    dispatch_semaphore_t frameRenderingSemaphore;
    BOOL captureAsYUV;
    GLuint luminanceTexture, chrominanceTexture;
    __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;
}
/// The AVCaptureSession used to capture from the camera
@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;
/// This enables the capture session preset to be changed on the fly
@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;
/// This sets the frame rate of the camera (iOS 5 and above only)
/**
 Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
 */
@property (readwrite) int32_t frameRate;
/// Easy way to tell which cameras are present on device
@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;
/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
@property(readwrite, nonatomic) BOOL runBenchmark;
/// Use this property to manage camera settings. Focus point, exposure point, etc.
@property(readonly) AVCaptureDevice *inputCamera;
/// This determines the rotation applied to the output image, based on the source material
@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;
/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;
/// Receives raw sample buffers; held non-owning, so clear it before the delegate deallocates.
@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;
/// @name Initialization and teardown
/** Begin a capture session
 See AVCaptureSession for acceptable values
 @param sessionPreset Session preset to use
 @param cameraPosition Camera to capture from
 */
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
/** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
 can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
 later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added.
 */
- (BOOL)addAudioInputsAndOutputs;
/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
 were removed, or NO if they hadn't already been added.
 */
- (BOOL)removeAudioInputsAndOutputs;
/** Tear down the capture session
 */
- (void)removeInputsAndOutputs;
/// @name Manage the camera video stream
/** Start camera capturing
 */
- (void)startCameraCapture;
/** Stop camera capturing
 */
- (void)stopCameraCapture;
/** Pause camera capturing
 */
- (void)pauseCameraCapture;
/** Resume camera capturing
 */
- (void)resumeCameraCapture;
/** Process a video sample
 @param sampleBuffer Buffer to process
 */
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
/** Process an audio sample
 @param sampleBuffer Buffer to process
 */
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
/** Get the position (front, rear) of the source camera
 */
- (AVCaptureDevicePosition)cameraPosition;
/** Get the AVCaptureConnection of the source camera
 */
- (AVCaptureConnection *)videoCaptureConnection;
/** This flips between the front and rear cameras
 */
- (void)rotateCamera;
/// @name Benchmarking
/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display
 */
- (CGFloat)averageFrameDurationDuringCapture;
- (void)resetBenchmarkAverage;
+ (BOOL)isBackFacingCameraPresent;
+ (BOOL)isFrontFacingCameraPresent;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageVideoCamera.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,086
|
```objective-c
#import "GPUImageFilter.h"
/// CGA-colorspace styling filter. Exposes no configurable properties; the
/// entire effect lives in the fragment shader defined in the matching .m file.
@interface GPUImageCGAColorspaceFilter : GPUImageFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 23
|
```objective-c
#import "GPUImagePolkaDotFilter.h"
// Fragment shader for the polka-dot effect: snap each fragment to the center
// of a grid cell (cell width = fractionalWidthOfPixel, corrected for
// aspectRatio), then show the sampled color only within a circle of radius
// (fractionalWidthOfPixel * 0.5) * dotScaling around that center; outside the
// circle the output is black (alpha preserved).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (explicit precision qualifiers).
NSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float fractionalWidthOfPixel;
uniform highp float aspectRatio;
uniform highp float dotScaling;
void main()
{
highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
// Remap both coordinates into an aspect-corrected space so dots stay round.
highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
lowp float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
lowp vec4 inputColor = texture2D(inputImageTexture, samplePos);
gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a);
}
);
#else
// Desktop OpenGL variant; logic is identical to the ES shader above.
NSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float fractionalWidthOfPixel;
uniform float aspectRatio;
uniform float dotScaling;
void main()
{
vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
vec4 inputColor = texture2D(inputImageTexture, samplePos);
gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a);
}
);
#endif
@implementation GPUImagePolkaDotFilter

@synthesize dotScaling = _dotScaling;

#pragma mark -
#pragma mark Initialization

// Compiles the polka-dot fragment shader, caches the dotScaling uniform
// handle, and applies the default dot size.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImagePolkaDotFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    dotScalingUniform = [filterProgram uniformIndex:@"dotScaling"];

    self.dotScaling = 0.90;

    return self;
}

#pragma mark -
#pragma mark Accessors

// Fraction of each grid cell's half-width that the dot's radius covers.
- (void)setDotScaling:(CGFloat)newValue;
{
    _dotScaling = newValue;

    [self setFloat:_dotScaling forUniform:dotScalingUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 713
|
```objective-c
#import "GPUImageFilter.h"
/// Applies a 2-D affine or 3-D transform to the incoming image.
@interface GPUImageTransformFilter : GPUImageFilter
{
    GLint transformMatrixUniform, orthographicMatrixUniform;
    GPUMatrix4x4 orthographicMatrix;
}
// You can either set the transform to apply to be a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input).
@property(readwrite, nonatomic) CGAffineTransform affineTransform;
@property(readwrite, nonatomic) CATransform3D transform3D;
// This applies the transform to the raw frame data if set to YES, the default of NO takes the aspect ratio of the image input into account when rotating
@property(readwrite, nonatomic) BOOL ignoreAspectRatio;
// Sets the anchor point to the top-left corner instead of the center.
@property(readwrite, nonatomic) BOOL anchorTopLeft;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTransformFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 172
|
```objective-c
#import "GPUImageLinearBurnBlendFilter.h"
// Fragment shader for linear-burn blending of two inputs:
// result = clamp(base + overlay - 1, 0, 1), with the base image's alpha kept.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (precision qualifiers required).
NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
}
);
#else
// Desktop OpenGL variant; logic is identical to the ES shader above.
NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
}
);
#endif
@implementation GPUImageLinearBurnBlendFilter

// No uniforms to wire up — just compile the linear-burn fragment shader via
// the two-input blend superclass.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLinearBurnBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 358
|
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
/// Median filter over a 3x3 neighborhood (sampling coordinates supplied by
/// the GPUImage3x3TextureSamplingFilter superclass). No configurable
/// properties; the work is done in the shader in the matching .m file.
@interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMedianFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 30
|
```objective-c
#import "GPUImageMovie.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"
#import "GPUImageVideoCamera.h"
// Private state for GPUImageMovie. Frames can arrive through one of two
// paths: an AVAssetReader pulling from a URL/asset, or an
// AVPlayerItemVideoOutput polled from a CADisplayLink when a player item is
// supplied.
@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
{
BOOL audioEncodingIsFinished, videoEncodingIsFinished;
// Non-nil only when frames are pulled on demand by a movie writer's
// input-ready callbacks rather than the free-running read loop.
GPUImageMovieWriter *synchronizedMovieWriter;
AVAssetReader *reader;
AVPlayerItemVideoOutput *playerItemOutput;
CADisplayLink *displayLink;
CMTime previousFrameTime, processingFrameTime;
CFAbsoluteTime previousActualFrameTime;
BOOL keepLooping;
// GL objects used to convert the decoded YUV planes to RGB.
GLuint luminanceTexture, chrominanceTexture;
GLProgram *yuvConversionProgram;
GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
GLint yuvConversionMatrixUniform;
// Points at one of the kColorConversion* matrices, chosen per frame.
const GLfloat *_preferredConversion;
BOOL isFullYUVRange;
int imageBufferWidth, imageBufferHeight;
}
- (void)processAsset;
@end
// Movie/file playback source for GPUImage filter chains.
@implementation GPUImageMovie
@synthesize url = _url;
@synthesize asset = _asset;
@synthesize runBenchmark = _runBenchmark;
@synthesize playAtActualSpeed = _playAtActualSpeed;
@synthesize delegate = _delegate;
@synthesize shouldRepeat = _shouldRepeat;
#pragma mark -
#pragma mark Initialization and teardown
// Initializes a movie source that will read from a file URL; the asset is
// loaded lazily when -startProcessing runs.
- (id)initWithURL:(NSURL *)url;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = url;
    self.asset = nil;

    return self;
}
// Initializes a movie source from an already-loaded AVAsset.
- (id)initWithAsset:(AVAsset *)asset;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = asset;

    return self;
}
// Initializes a movie source that pulls frames from an AVPlayerItem via an
// AVPlayerItemVideoOutput (display-link driven path).
- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = nil;
    self.playerItem = playerItem;

    return self;
}
// One-time setup of the YUV-to-RGB conversion program, performed on the video
// processing queue. Only runs when fast texture upload is available; the
// BGRA fallback path needs no conversion program.
- (void)yuvConversionSetup;
{
if ([GPUImageContext supportsFastTextureUpload])
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
// Default to 709 full-range; per-frame color attachments may override
// this later in -processMovieFrame:withSampleTime:.
_preferredConversion = kColorConversion709;
isFullYUVRange = YES;
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
if (!yuvConversionProgram.initialized)
{
// Bind attributes before linking, then link; a link failure is a
// programmer error, so log everything and assert.
[yuvConversionProgram addAttribute:@"position"];
[yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
if (![yuvConversionProgram link])
{
NSString *progLog = [yuvConversionProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [yuvConversionProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
yuvConversionProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
// Cache attribute/uniform locations for per-frame use.
yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
glEnableVertexAttribArray(yuvConversionPositionAttribute);
glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
});
}
}
// Intentionally empty: display-link invalidation was moved into
// endProcessing so teardown happens deterministically, not at dealloc time.
- (void)dealloc
{
// Moved into endProcessing
//if (self.playerItem && (displayLink != nil))
//{
// [displayLink invalidate]; // remove from all run loops
// displayLink = nil;
//}
}
#pragma mark -
#pragma mark Movie processing
// Ties this movie source to a writer: -processAsset will then feed frames and
// audio samples on demand through the writer's input-ready callbacks instead
// of the free-running read loop.
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
{
synchronizedMovieWriter = movieWriter;
movieWriter.encodingLiveVideo = NO;
}
// Entry point for playback. Routes to one of three sources, in priority
// order: a player item, an already-set asset (url == nil), or a URL whose
// asset is loaded asynchronously before processing starts.
- (void)startProcessing
{
if( self.playerItem ) {
[self processPlayerItem];
return;
}
if(self.url == nil)
{
[self processAsset];
return;
}
if (_shouldRepeat) keepLooping = YES;
previousFrameTime = kCMTimeZero;
previousActualFrameTime = CFAbsoluteTimeGetCurrent();
// Precise duration/timing so frame timestamps are accurate for pacing.
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
GPUImageMovie __block *blockSelf = self;
[inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSError *error = nil;
AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
if (tracksStatus != AVKeyValueStatusLoaded)
{
// NOTE(review): a tracks-load failure returns silently here; `error`
// is never surfaced to the caller or delegate.
return;
}
blockSelf.asset = inputAsset;
[blockSelf processAsset];
blockSelf = nil;
});
}];
}
// Builds an AVAssetReader for self.asset with one video track output
// (YUV biplanar full-range when fast texture upload is supported, BGRA
// otherwise) and, when an audioEncodingTarget is set and the asset has audio,
// one audio track output.
- (AVAssetReader*)createAssetReader
{
NSError *error = nil;
// NOTE(review): `error` is never checked; if reader creation fails, the nil
// reader only surfaces later as a startReading failure in -processAsset.
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
if ([GPUImageContext supportsFastTextureUpload]) {
[outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
isFullYUVRange = YES;
}
else {
[outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
isFullYUVRange = NO;
}
// Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
readerVideoTrackOutput.alwaysCopiesSampleData = NO;
[assetReader addOutput:readerVideoTrackOutput];
NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
if (shouldRecordAudioTrack)
{
[self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
// This might need to be extended to handle movies with more than one audio track
AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
readerAudioTrackOutput.alwaysCopiesSampleData = NO;
[assetReader addOutput:readerAudioTrackOutput];
}
return assetReader;
}
// Drives decoding of self.asset. With a synchronized movie writer, hands the
// reader outputs to the writer's pull callbacks; otherwise runs a blocking
// loop that reads video (and audio, if present) until the reader stops.
- (void)processAsset
{
reader = [self createAssetReader];
AVAssetReaderOutput *readerVideoTrackOutput = nil;
AVAssetReaderOutput *readerAudioTrackOutput = nil;
// Assume no audio until an audio output is found on the reader.
audioEncodingIsFinished = YES;
for( AVAssetReaderOutput *output in reader.outputs ) {
if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
audioEncodingIsFinished = NO;
readerAudioTrackOutput = output;
}
else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
readerVideoTrackOutput = output;
}
}
if ([reader startReading] == NO)
{
NSLog(@"Error reading from file at URL: %@", self.url);
return;
}
__unsafe_unretained GPUImageMovie *weakSelf = self;
if (synchronizedMovieWriter != nil)
{
// Writer-driven mode: the writer pulls frames/samples when its inputs are
// ready for more data.
[synchronizedMovieWriter setVideoInputReadyCallback:^{
return [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
}];
[synchronizedMovieWriter setAudioInputReadyCallback:^{
return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}];
[synchronizedMovieWriter enableSynchronizationCallbacks];
}
else
{
// Free-running mode: blocks the current thread until reading finishes or
// looping is cancelled.
while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )
{
[weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}
}
if (reader.status == AVAssetReaderStatusCompleted) {
[reader cancelReading];
if (keepLooping) {
// Restart from the top on the main queue for another loop iteration.
reader = nil;
dispatch_async(dispatch_get_main_queue(), ^{
[self startProcessing];
});
} else {
[weakSelf endProcessing];
}
}
}
}
// Sets up the player-item path: a paused CADisplayLink plus an
// AVPlayerItemVideoOutput. The display link is unpaused from
// -outputMediaDataWillChange: once the output has media to deliver.
- (void)processPlayerItem
{
runSynchronouslyOnVideoProcessingQueue(^{
displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
[displayLink setPaused:YES];
dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
// Request the same pixel formats as the asset-reader path.
NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
if ([GPUImageContext supportsFastTextureUpload]) {
[pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
else {
[pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
[playerItemOutput setDelegate:self queue:videoProcessingQueue];
[_playerItem addOutput:playerItemOutput];
[playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
});
}
// AVPlayerItemOutputPullDelegate: media is about to become available, so
// resume the display link that polls for new pixel buffers.
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
// Restart display link.
[displayLink setPaused:NO];
}
// Display-link tick for the player-item path: polls the item output for a
// pixel buffer timed to the next vsync and, if one exists, processes it on
// the video processing queue.
- (void)displayLinkCallback:(CADisplayLink *)sender
{
/*
 The callback gets called once every Vsync.
 Using the display link's timestamp and duration we can compute the next time the screen will be refreshed, and copy the pixel buffer for that time
 This pixel buffer can then be processed and later rendered on screen.
*/
// Calculate the nextVsync time which is when the screen will be refreshed next.
CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {
__unsafe_unretained GPUImageMovie *weakSelf = self;
// copyPixelBufferForItemTime: returns +1; released after processing below.
CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
if( pixelBuffer )
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
CFRelease(pixelBuffer);
});
}
}
// Pulls one video sample from the reader output and processes it. Returns
// YES when a frame was consumed, NO when the reader is drained, stopped, or
// the frame could not be read. When playAtActualSpeed is set, sleeps to pace
// frames against wall-clock time.
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading && ! videoEncodingIsFinished)
{
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
//NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));
if (_playAtActualSpeed)
{
// Do this outside of the video processing queue to not slow that down while waiting
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();
CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;
if (frameTimeDifference > actualTimeDifference)
{
// Sleep off the surplus so playback matches the media timeline.
usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
}
previousFrameTime = currentSampleTime;
previousActualFrameTime = CFAbsoluteTimeGetCurrent();
}
__unsafe_unretained GPUImageMovie *weakSelf = self;
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:sampleBufferRef];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
});
return YES;
}
else
{
// copyNextSampleBuffer returned NULL: the video track is exhausted.
if (!keepLooping) {
videoEncodingIsFinished = YES;
if( videoEncodingIsFinished && audioEncodingIsFinished )
[self endProcessing];
}
}
}
else if (synchronizedMovieWriter != nil)
{
// Writer-driven mode: tell the writer the source is done once the reader
// reports completion.
if (reader.status == AVAssetReaderStatusCompleted)
{
[self endProcessing];
}
}
return NO;
}
// Pulls one audio sample from the reader output and forwards it to the
// audio encoding target. Returns YES when a sample was consumed, NO when the
// audio track is drained or the reader has stopped.
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished)
{
CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
if (audioSampleBufferRef)
{
//NSLog(@"read an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef))));
[self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];
CFRelease(audioSampleBufferRef);
return YES;
}
else
{
// copyNextSampleBuffer returned NULL: the audio track is exhausted.
if (!keepLooping) {
audioEncodingIsFinished = YES;
if( videoEncodingIsFinished && audioEncodingIsFinished )
[self endProcessing];
}
}
}
else if (synchronizedMovieWriter != nil)
{
// Writer-driven mode: any terminal reader state ends processing.
if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
reader.status == AVAssetReaderStatusCancelled)
{
[self endProcessing];
}
}
return NO;
}
// Unwraps a sample buffer into its image buffer and presentation timestamp,
// records the timestamp for -progress, and hands off to the pixel-buffer
// processing path.
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);
// Remembered so -progress can report how far into the asset we are.
processingFrameTime = currentSampleTime;
[self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}
// Fraction of the asset processed so far.
// Returns 0 before reading starts (or when the duration is unusable),
// current/duration while the reader is running, and 1 once it has completed.
- (float)progress
{
    if ( AVAssetReaderStatusReading == reader.status )
    {
        // Guard both timescales: processingFrameTime is zero-initialized
        // (timescale 0) before the first frame, and an asset with a zero or
        // indefinite duration would make the division produce NaN/inf.
        float current = (processingFrameTime.timescale > 0)
            ? (processingFrameTime.value * 1.0f / processingFrameTime.timescale)
            : 0.f;
        float duration = (self.asset.duration.timescale > 0)
            ? (self.asset.duration.value * 1.0f / self.asset.duration.timescale)
            : 0.f;
        // `!(duration > 0)` also catches NaN, which fails every comparison.
        if (!(duration > 0.0f))
        {
            return 0.f;
        }
        return current / duration;
    }
    else if ( AVAssetReaderStatusCompleted == reader.status )
    {
        return 1.f;
    }
    else
    {
        return 0.f;
    }
}
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);
CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
if (colorAttachments != NULL)
{
if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
else
{
_preferredConversion = kColorConversion709;
}
}
else
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
// Fix issue 1580
[GPUImageContext useImageProcessingContext];
if ([GPUImageContext supportsFastTextureUpload])
{
CVOpenGLESTextureRef luminanceTextureRef = NULL;
CVOpenGLESTextureRef chrominanceTextureRef = NULL;
// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion
{
if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
{
imageBufferWidth = bufferWidth;
imageBufferHeight = bufferHeight;
}
CVReturn err;
// Y-plane
glActiveTexture(GL_TEXTURE4);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// UV-plane
glActiveTexture(GL_TEXTURE5);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// if (!allTargetsWantMonochromeData)
// {
[self convertYUVToRGBOutput];
// }
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
}
[outputFramebuffer unlock];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
CFRelease(luminanceTextureRef);
CFRelease(chrominanceTextureRef);
}
else
{
// TODO: Mesh this with the new framebuffer cache
// CVPixelBufferLockBaseAddress(movieFrame, 0);
//
// CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
//
// if (!texture || err) {
// NSLog(@"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
// NSAssert(NO, @"Camera failure");
// return;
// }
//
// outputTexture = CVOpenGLESTextureGetName(texture);
// // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
// glBindTexture(GL_TEXTURE_2D, outputTexture);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//
// for (id<GPUImageInput> currentTarget in targets)
// {
// NSInteger indexOfObject = [targets indexOfObject:currentTarget];
// NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
//
// [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
// [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];
//
// [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
// }
//
// CVPixelBufferUnlockBaseAddress(movieFrame, 0);
// CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
// CFRelease(texture);
//
// outputTexture = 0;
}
}
else
{
// Upload to texture
CVPixelBufferLockBaseAddress(movieFrame, 0);
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
// Using BGRA extension to pull in video frame data directly
glTexImage2D(GL_TEXTURE_2D,
0,
self.outputTextureOptions.internalFormat,
bufferWidth,
bufferHeight,
0,
self.outputTextureOptions.format,
self.outputTextureOptions.type,
CVPixelBufferGetBaseAddress(movieFrame));
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
}
[outputFramebuffer unlock];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
}
if (_runBenchmark)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
}
// Stops the playback/processing loop and tears down the pipeline: halts the
// read loop, pauses the display link, signals end-of-stream to all targets,
// detaches the synchronized movie writer, and notifies the delegate.
- (void)endProcessing;
{
    keepLooping = NO;
    [displayLink setPaused:YES];
    // Let every downstream filter/output finish up.
    for (id<GPUImageInput> currentTarget in targets)
    {
        [currentTarget endProcessing];
    }
    // Replace the writer's pull callbacks with blocks that report "no more data".
    if (synchronizedMovieWriter != nil)
    {
        [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}];
        [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];
    }
    // AVPlayerItem-based playback drives frames via a display link; remove it
    // from all run loops so it can no longer fire.
    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate]; // remove from all run loops
        displayLink = nil;
    }
    // The completion callback is optional on the delegate protocol.
    if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
        [self.delegate didCompletePlayingMovie];
    }
    self.delegate = nil;
}
// Aborts any in-flight asset read, then runs the normal teardown path.
- (void)cancelProcessing
{
    // Messaging nil would be a harmless no-op, but the explicit check keeps
    // the "only cancel when a reader exists" intent visible.
    if (reader != nil)
    {
        [reader cancelReading];
    }
    [self endProcessing];
}
// Renders a full-screen quad through the YUV->RGB conversion shader, reading
// the previously-bound luminance and chrominance textures and leaving the RGB
// result in outputFramebuffer for downstream targets.
- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // Full-screen quad in normalized device coordinates.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f,
    };
    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };
    // Y plane on unit 4 and CbCr plane on unit 5, matching the sampler uniforms.
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);
    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);
    // Color-conversion matrix — presumably selected (BT.601 vs BT.709) when the
    // pixel buffer attachments were inspected upstream; TODO confirm.
    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
// Exposes the backing AVAssetReader ivar so callers can inspect read status.
- (AVAssetReader*)assetReader {
    return reader;
}
// Accessor for the ivar tracking whether the audio track has been fully read.
- (BOOL)audioEncodingIsFinished {
    return audioEncodingIsFinished;
}
// Accessor for the ivar tracking whether the video track has been fully read.
- (BOOL)videoEncodingIsFinished {
    return videoEncodingIsFinished;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMovie.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 6,091
|
```objective-c
#import "GPUImageMedianFilter.h"
/*
3x3 median filter, adapted from "A Fast, Small-Radius GPU Median Filter" by Morgan McGuire in ShaderX6
path_to_url
Morgan McGuire and Kyle Whitson
Williams College
Register allocation tips by Victor Huang Xiaohuang
University of Illinois at Urbana-Champaign
path_to_url
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (needs an explicit float precision qualifier). Computes a
// per-channel 3x3 median via the exchange network from McGuire's "A Fast,
// Small-Radius GPU Median Filter" (see license header above). Registers are
// minimized by reloading v[5] between reduction stages.
NSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
#define s2(a, b) temp = a; a = min(a, b); b = max(temp, b);
#define mn3(a, b, c) s2(a, b); s2(a, c);
#define mx3(a, b, c) s2(b, c); s2(a, c);
#define mnmx3(a, b, c) mx3(a, b, c); s2(a, b); // 3 exchanges
#define mnmx4(a, b, c, d) s2(a, b); s2(c, d); s2(a, c); s2(b, d); // 4 exchanges
#define mnmx5(a, b, c, d, e) s2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e); // 6 exchanges
#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges
void main()
{
vec3 v[6];
v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
// v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
// v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
vec3 temp;
mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]);
v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
mnmx5(v[1], v[2], v[3], v[4], v[5]);
v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
mnmx4(v[2], v[3], v[4], v[5]);
v[5] = texture2D(inputImageTexture, textureCoordinate).rgb;
mnmx3(v[3], v[4], v[5]);
gl_FragColor = vec4(v[4], 1.0);
}
);
#else
// Desktop OpenGL variant: identical algorithm, no precision qualifiers.
NSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
#define s2(a, b) temp = a; a = min(a, b); b = max(temp, b);
#define mn3(a, b, c) s2(a, b); s2(a, c);
#define mx3(a, b, c) s2(b, c); s2(a, c);
#define mnmx3(a, b, c) mx3(a, b, c); s2(a, b); // 3 exchanges
#define mnmx4(a, b, c, d) s2(a, b); s2(c, d); s2(a, c); s2(b, d); // 4 exchanges
#define mnmx5(a, b, c, d, e) s2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e); // 6 exchanges
#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges
void main()
{
vec3 v[6];
v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
// v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
// v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
vec3 temp;
mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]);
v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
mnmx5(v[1], v[2], v[3], v[4], v[5]);
v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
mnmx4(v[2], v[3], v[4], v[5]);
v[5] = texture2D(inputImageTexture, textureCoordinate).rgb;
mnmx3(v[3], v[4], v[5]);
gl_FragColor = vec4(v[4], 1.0);
}
);
#endif
@implementation GPUImageMedianFilter
#pragma mark -
#pragma mark Initialization and teardown
// Installs the 3x3 median fragment shader defined above.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMedianFragmentShaderString]))
    {
        return nil;
    }
    // NOTE(review): alloc zero-fills ivars, so this reset is redundant unless
    // the superclass initializer changed the flag — confirm before removing.
    hasOverriddenImageSizeFactor = NO;
    return self;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMedianFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,779
|
```objective-c
#import "GPUImageFilter.h"
/** Pixels with a luminance above the threshold will appear white, and those below will be black
 */
@interface GPUImageLuminanceThresholdFilter : GPUImageFilter
{
    GLint thresholdUniform; // cached location of the "threshold" shader uniform
}
/** Anything above this luminance will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.5 as the default
 */
@property(readwrite, nonatomic) CGFloat threshold;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 99
|
```objective-c
#import "GPUImage3x3ConvolutionFilter.h"
// Thin subclass of the generic 3x3 convolution filter; the Laplacian kernel
// is presumably installed by the implementation file (not visible here).
@interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 33
|
```objective-c
#import "GPUImageLuminanceThresholdFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: computes Rec.709-weighted luminance, then maps the pixel
// to white or black via step(threshold, luminance), preserving input alpha.
NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float threshold;
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
highp float luminance = dot(textureColor.rgb, W);
highp float thresholdResult = step(threshold, luminance);
gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);
}
);
#else
// Desktop OpenGL variant: identical logic, no precision qualifiers.
NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float threshold;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, W);
float thresholdResult = step(threshold, luminance);
gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);
}
);
#endif
@implementation GPUImageLuminanceThresholdFilter
@synthesize threshold = _threshold;
#pragma mark -
#pragma mark Initialization
// Installs the luminance-threshold fragment shader, caches its uniform
// location, and seeds the documented default cutoff of 0.5.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLuminanceThresholdFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    thresholdUniform = [filterProgram uniformIndex:@"threshold"];
    self.threshold = 0.5;
    return self;
}
#pragma mark -
#pragma mark Accessors
// Stores the new cutoff and pushes it straight into the shader uniform.
- (void)setThreshold:(CGFloat)newValue;
{
    _threshold = newValue;
    [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 432
|
```objective-c
#import "GPUImageFilterGroup.h"
// Filter group: thresholds each pixel against a blurred local average of its
// neighborhood (the internal filter chain is defined in the implementation).
@interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup
/** A multiplier for the background averaging blur radius in pixels, with a default of 4
 */
@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 56
|
```objective-c
#import "GPUImageOutput.h"
// Stringizes its argument (two levels so macro arguments expand first).
#define STRINGIZE(x) #x
#define STRINGIZE2(x) STRINGIZE(x)
// Turns bare in-line shader source into an NSString literal:
//   SHADER_STRING( void main() { ... } )
#define SHADER_STRING(text) @ STRINGIZE2(text)
// Helpers for embedding a literal '#' inside SHADER_STRING bodies, where a
// plain '#' would otherwise be taken as a preprocessor directive.
#define GPUImageHashIdentifier #
#define GPUImageWrappedLabel(x) x
#define GPUImageEscapedHashIdentifier(a) GPUImageWrappedLabel(GPUImageHashIdentifier)a
extern NSString *const kGPUImageVertexShaderString;
extern NSString *const kGPUImagePassthroughFragmentShaderString;
// Plain C structs mirroring GLSL vec3/vec4/mat3/mat4, used to pass uniform
// values through GPUImageFilter's set*: methods. Fields are named positionally
// ("one".."four") rather than x/y/z/w.
struct GPUVector4 {
    GLfloat one;
    GLfloat two;
    GLfloat three;
    GLfloat four;
};
typedef struct GPUVector4 GPUVector4;
struct GPUVector3 {
    GLfloat one;
    GLfloat two;
    GLfloat three;
};
typedef struct GPUVector3 GPUVector3;
// 4x4 matrix as four row/column vectors (orientation depends on how callers
// upload it — see the GL_FALSE/GL_TRUE transpose flag at the call sites).
struct GPUMatrix4x4 {
    GPUVector4 one;
    GPUVector4 two;
    GPUVector4 three;
    GPUVector4 four;
};
typedef struct GPUMatrix4x4 GPUMatrix4x4;
struct GPUMatrix3x3 {
    GPUVector3 one;
    GPUVector3 two;
    GPUVector3 three;
};
typedef struct GPUMatrix3x3 GPUMatrix3x3;
/** GPUImage's base filter class
 Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter.
 */
@interface GPUImageFilter : GPUImageOutput <GPUImageInput>
{
    GPUImageFramebuffer *firstInputFramebuffer; // framebuffer received from the upstream source
    GLProgram *filterProgram;                   // compiled vertex + fragment program for this filter
    GLint filterPositionAttribute, filterTextureCoordinateAttribute;
    GLint filterInputTextureUniform;
    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;
    BOOL isEndProcessing;
    CGSize currentFilterSize;
    GPUImageRotationMode inputRotation;
    BOOL currentlyReceivingMonochromeInput;
    NSMutableDictionary *uniformStateRestorationBlocks; // blocks that re-apply uniform values when the program is reactivated
    dispatch_semaphore_t imageCaptureSemaphore;         // presumably gates synchronous still-image capture — confirm in the .m
}
@property(readonly) CVPixelBufferRef renderTarget;
@property(readwrite, nonatomic) BOOL preventRendering;
@property(readwrite, nonatomic) BOOL currentlyReceivingMonochromeInput;
/// @name Initialization and teardown
/**
 Initialize with vertex and fragment shaders
 You make take advantage of the SHADER_STRING macro to write your shaders in-line.
 @param vertexShaderString Source code of the vertex shader to use
 @param fragmentShaderString Source code of the fragment shader to use
 */
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
/**
 Initialize with a fragment shader
 You may take advantage of the SHADER_STRING macro to write your shader in-line.
 @param fragmentShaderString Source code of fragment shader to use
 */
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
/**
 Initialize with a fragment shader
 @param fragmentShaderFilename Filename of fragment shader to load
 */
- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
- (void)initializeAttributes;
- (void)setupFilterForSize:(CGSize)filterFrameSize;
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;
/// @name Managing the display FBOs
/** Size of the frame buffer object
 */
- (CGSize)sizeOfFBO;
/// @name Rendering
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
- (CGSize)outputFrameSize;
/// @name Input parameters
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
// Name-based uniform setters (look up the uniform location each call).
- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName;
// Location-based uniform setters (caller supplies the cached GLint location).
- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,282
|
```objective-c
#import "GPUImageHarrisCornerDetectionFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageXYDerivativeFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
#import "GPUImageColorPackingFilter.h"
#import "GPUImageGaussianBlurFilter.h"
// Class extension: private method invoked from the completion blocks wired up
// in -initWithCornerDetectionFragmentShader:.
@interface GPUImageHarrisCornerDetectionFilter()
- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime;
@end
// This is the Harris corner detector, as described in
// C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.
@implementation GPUImageHarrisCornerDetectionFilter
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: computes the Harris cornerness measure from packed
// derivative data (Ix^2, Iy^2 in .xy; Ixy remapped from [0,1] to [-1,1] in .z).
NSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float sensitivity;
const mediump float harrisConstant = 0.04;
void main()
{
mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
mediump float derivativeSum = derivativeElements.x + derivativeElements.y;
mediump float zElement = (derivativeElements.z * 2.0) - 1.0;
// R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
mediump float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum;
gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
}
);
#else
// Desktop OpenGL variant: identical computation, no precision qualifiers.
NSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float sensitivity;
const float harrisConstant = 0.04;
void main()
{
vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
float derivativeSum = derivativeElements.x + derivativeElements.y;
float zElement = (derivativeElements.z * 2.0) - 1.0;
// R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum;
gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
}
);
#endif
// Property backing: blur radius and threshold delegate to sub-filters (see
// accessors below); sensitivity is cached locally in _sensitivity.
@synthesize blurRadiusInPixels;
@synthesize cornersDetectedBlock;
@synthesize sensitivity = _sensitivity;
@synthesize threshold = _threshold;
@synthesize intermediateImages = _intermediateImages;
#pragma mark -
#pragma mark Initialization and teardown
// Convenience initializer: builds the pipeline with the stock Harris
// cornerness fragment shader.
- (id)init;
{
    return [self initWithCornerDetectionFragmentShader:kGPUImageHarrisCornerDetectionFragmentShaderString];
}
// Designated initializer: wires a four-stage pipeline
//   derivative -> Gaussian blur -> cornerness shader -> thresholded NMS
// and registers a completion block on the final stage that reads back corner
// positions on the CPU. When DEBUGFEATUREDETECTION is defined, each stage also
// snapshots its output into _intermediateImages.
- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;
{
    if (!(self = [super init]))
    {
        return nil;
    }
#ifdef DEBUGFEATUREDETECTION
    _intermediateImages = [[NSMutableArray alloc] init];
#endif
    // First pass: reduce to luminance and take the derivative of the luminance texture
    derivativeFilter = [[GPUImageXYDerivativeFilter alloc] init];
    [self addFilter:derivativeFilter];
#ifdef DEBUGFEATUREDETECTION
    // __unsafe_unretained avoids retain cycles between self/filters and the
    // completion blocks they store (pre-dates __weak availability here).
    __unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages;
    __unsafe_unretained GPUImageFilter *weakFilter = derivativeFilter;
    [derivativeFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
        [weakIntermediateImages addObject:intermediateImage];
    }];
#endif
    // Second pass: blur the derivative
    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
    [self addFilter:blurFilter];
#ifdef DEBUGFEATUREDETECTION
    weakFilter = blurFilter;
    [blurFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
        [weakIntermediateImages addObject:intermediateImage];
    }];
#endif
    // Third pass: apply the Harris corner detection calculation
    harrisCornerDetectionFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:cornerDetectionFragmentShader];
    [self addFilter:harrisCornerDetectionFilter];
#ifdef DEBUGFEATUREDETECTION
    weakFilter = harrisCornerDetectionFilter;
    [harrisCornerDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
        [weakIntermediateImages addObject:intermediateImage];
    }];
#endif
    // Fourth pass: apply non-maximum suppression and thresholding to find the local maxima
    nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] init];
    [self addFilter:nonMaximumSuppressionFilter];
    __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;
#ifdef DEBUGFEATUREDETECTION
    weakFilter = nonMaximumSuppressionFilter;
    [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
        [weakIntermediateImages addObject:intermediateImage];
        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
    }];
#else
    [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
    }];
#endif
    // Disabled fifth/sixth pass: packing thresholded points into RGBA channels.
    // Sixth pass: compress the thresholded points into the RGBA channels
//    colorPackingFilter = [[GPUImageColorPackingFilter alloc] init];
//    [self addFilter:colorPackingFilter];
//
//
//#ifdef DEBUGFEATUREDETECTION
//    __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;
//    weakFilter = colorPackingFilter;
//    [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
//        NSLog(@"Triggered response from compaction filter");
//
//        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
//        [weakIntermediateImages addObject:intermediateImage];
//
//        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
//    }];
//#else
//    __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;
//    [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
//        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
//    }];
//#endif
    // Chain the stages and mark entry/exit points of the group.
    [derivativeFilter addTarget:blurFilter];
    [blurFilter addTarget:harrisCornerDetectionFilter];
    [harrisCornerDetectionFilter addTarget:nonMaximumSuppressionFilter];
//    [simpleThresholdFilter addTarget:colorPackingFilter];
    self.initialFilters = [NSArray arrayWithObjects:derivativeFilter, nil];
//    self.terminalFilter = colorPackingFilter;
    self.terminalFilter = nonMaximumSuppressionFilter;
    // Defaults: 2 px blur, sensitivity 5.0, NMS threshold 0.20.
    self.blurRadiusInPixels = 2.0;
    self.sensitivity = 5.0;
    self.threshold = 0.20;
    return self;
}
// free(NULL) is well-defined, so no guards are needed when the readback
// buffers were never allocated (they are lazily created during extraction).
- (void)dealloc;
{
    free(rawImagePixels);
    free(cornersArray);
}
#pragma mark -
#pragma mark Corner extraction
// Reads the NMS output back to the CPU and scans for non-black pixels; each
// hit becomes a normalized (x, y) corner coordinate appended to cornersArray,
// capped at 511 corners. Finally invokes cornersDetectedBlock, if set.
// Assumes the GL context/framebuffer of the NMS stage is current — the block
// registered in init runs this right after that stage renders.
- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime;
{
    // we need a normal color texture for this filter
    NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
    NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
    NSUInteger numberOfCorners = 0;
    CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize;
    unsigned int imageByteSize = imageSize.width * imageSize.height * 4;
    // Lazy one-time allocation; buffers are reused for every subsequent frame
    // (note: not resized if the output frame size later changes).
    if (rawImagePixels == NULL)
    {
        rawImagePixels = (GLubyte *)malloc(imageByteSize);
        cornersArray = calloc(512 * 2, sizeof(GLfloat));
    }
    glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    unsigned int imageWidth = imageSize.width * 4; // row stride in bytes (RGBA)
    unsigned int currentByte = 0;
    unsigned int cornerStorageIndex = 0;
    // Examine only the R channel of each pixel (stride 4); any nonzero value
    // marks a surviving local maximum from the NMS pass.
    while (currentByte < imageByteSize)
    {
        GLubyte colorByte = rawImagePixels[currentByte];
        if (colorByte > 0)
        {
            unsigned int xCoordinate = currentByte % imageWidth;
            unsigned int yCoordinate = currentByte / imageWidth;
            cornersArray[cornerStorageIndex++] = (CGFloat)(xCoordinate / 4) / imageSize.width;
            cornersArray[cornerStorageIndex++] = (CGFloat)(yCoordinate) / imageSize.height;
            numberOfCorners++;
            // Clamp so further hits overwrite the last slot instead of
            // overflowing the 512-pair buffer.
            numberOfCorners = MIN(numberOfCorners, 511);
            cornerStorageIndex = MIN(cornerStorageIndex, 1021);
        }
        currentByte +=4;
    }
    CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
    // NOTE(review): unconditional NSLog on every frame — consider guarding
    // behind a debug flag in production builds.
    NSLog(@"Processing time : %f ms", 1000.0 * currentFrameTime);
    if (cornersDetectedBlock != NULL)
    {
        cornersDetectedBlock(cornersArray, numberOfCorners, frameTime);
    }
}
// Color input is required; the monochrome fast path is intentionally disabled.
- (BOOL)wantsMonochromeInput;
{
    return NO;
}
#pragma mark -
#pragma mark Accessors
// Forwarded to the internal Gaussian blur stage; no local storage.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
    blurFilter.blurRadiusInPixels = newValue;
}
// Reads back from the internal Gaussian blur stage.
- (CGFloat)blurRadiusInPixels;
{
    return blurFilter.blurRadiusInPixels;
}
// Caches the value locally and pushes it into the cornerness shader's
// "sensitivity" uniform.
- (void)setSensitivity:(CGFloat)newValue;
{
    _sensitivity = newValue;
    [harrisCornerDetectionFilter setFloat:newValue forUniformName:@"sensitivity"];
}
// Forwarded to the non-maximum suppression stage (not stored locally, despite
// the @synthesize'd _threshold ivar above).
- (void)setThreshold:(CGFloat)newValue;
{
    nonMaximumSuppressionFilter.threshold = newValue;
}
// Reads back from the non-maximum suppression stage.
- (CGFloat)threshold;
{
    return nonMaximumSuppressionFilter.threshold;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 2,293
|
```objective-c
#import "GPUImageFilter.h"
// Adjusts overall image brightness via a single shader uniform.
@interface GPUImageBrightnessFilter : GPUImageFilter
{
    GLint brightnessUniform; // cached location of the "brightness" shader uniform
}
// Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat brightness;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 62
|
```objective-c
#import "GPUImageRawDataInput.h"
// Class extension: private texture-upload helper shared by the initializers
// and -updateDataFromBytes:size:.
@interface GPUImageRawDataInput()
- (void)uploadBytes:(GLubyte *)bytesToUpload;
@end
@implementation GPUImageRawDataInput
@synthesize pixelFormat = _pixelFormat;
@synthesize pixelType = _pixelType;
#pragma mark -
#pragma mark Initialization and teardown
// Convenience initializer: assumes BGRA pixel layout with unsigned-byte
// components.
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
{
    return [self initWithBytes:bytesToUpload size:imageSize pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte];
}
// Convenience initializer: caller-specified pixel format, unsigned-byte
// components assumed.
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
{
    return [self initWithBytes:bytesToUpload size:imageSize pixelFormat:pixelFormat type:GPUPixelTypeUByte];
}
// Designated initializer: records the image geometry and pixel format, then
// performs the initial texture upload.
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    // Binary semaphore: lets -processData drop frames while a previous update
    // is still in flight.
    dataUpdateSemaphore = dispatch_semaphore_create(1);
    uploadedImageSize = imageSize;
    self.pixelFormat = pixelFormat;
    self.pixelType = pixelType;
    [self uploadBytes:bytesToUpload];
    return self;
}
// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
// When dispatch objects are not managed by ARC (OS_OBJECT_USE_OBJC == 0), the
// semaphore must be released by hand; under ARC-managed dispatch objects the
// whole body compiles away.
- (void)dealloc;
{
#if !OS_OBJECT_USE_OBJC
    if (dataUpdateSemaphore != NULL)
    {
        dispatch_release(dataUpdateSemaphore);
    }
#endif
}
#pragma mark -
#pragma mark Image rendering
- (void)uploadBytes:(GLubyte *)bytesToUpload;
{
[GPUImageContext useImageProcessingContext];
// TODO: This probably isn't right, and will need to be corrected
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:uploadedImageSize textureOptions:self.outputTextureOptions onlyTexture:YES];
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
glTexImage2D(GL_TEXTURE_2D, 0, _pixelFormat==GPUPixelFormatRGB ? GL_RGB : GL_RGBA, (int)uploadedImageSize.width, (int)uploadedImageSize.height, 0, (GLint)_pixelFormat, (GLenum)_pixelType, bytesToUpload);
}
- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
{
uploadedImageSize = imageSize;
[self uploadBytes:bytesToUpload];
}
- (void)processData;
{
if (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}
runAsynchronouslyOnVideoProcessingQueue(^{
CGSize pixelSizeOfImage = [self outputImageSize];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
[currentTarget newFrameReadyAtTime:kCMTimeInvalid atIndex:textureIndexOfTarget];
}
dispatch_semaphore_signal(dataUpdateSemaphore);
});
}
- (void)processDataForTimestamp:(CMTime)frameTime;
{
if (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}
runAsynchronouslyOnVideoProcessingQueue(^{
CGSize pixelSizeOfImage = [self outputImageSize];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];
[currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndexOfTarget];
}
dispatch_semaphore_signal(dataUpdateSemaphore);
});
}
- (CGSize)outputImageSize;
{
return uploadedImageSize;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRawDataInput.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 933
|
```objective-c
#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
#import "GPUImageFilterGroup.h"
// Gradient-domain (Poisson) blend of two inputs, iterated numIterations times.
@interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSamplingFilter
{
    GLint mixUniform;

    // Second framebuffer — presumably used to ping-pong between iterations; confirm in the .m.
    GPUImageFramebuffer *secondOutputFramebuffer;
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat mix;

// The number of times to propagate the gradients.
// Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow.
@property(readwrite, nonatomic) NSUInteger numIterations;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 152
|
```objective-c
#import "GPUImageTiltShiftFilter.h"
#import "GPUImageFilter.h"
#import "GPUImageTwoInputFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (uses precision qualifiers).
// Blends the sharp input (texture 1) with its blurred copy (texture 2): fully
// sharp between topFocusLevel and bottomFocusLevel, ramping to fully blurred
// over focusFallOffRate above/below that band (driven by textureCoordinate2.y).
NSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 uniform highp float topFocusLevel;
 uniform highp float bottomFocusLevel;
 uniform highp float focusFallOffRate;

 void main()
 {
     lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);

     lowp float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y);
     blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y);

     gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity);
 }
);
#else
// Desktop OpenGL variant (no precision qualifiers); same math as above.
NSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 uniform float topFocusLevel;
 uniform float bottomFocusLevel;
 uniform float focusFallOffRate;

 void main()
 {
     vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);

     float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y);
     blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y);

     gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity);
 }
);
#endif
@implementation GPUImageTiltShiftFilter

@synthesize blurRadiusInPixels;
@synthesize topFocusLevel = _topFocusLevel;
@synthesize bottomFocusLevel = _bottomFocusLevel;
@synthesize focusFallOffRate = _focusFallOffRate;

// Builds the two-stage pipeline: a Gaussian blur whose output feeds a blend
// stage that mixes sharp and blurred pixels by vertical position.
- (id)init;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Stage one: Gaussian blur of the incoming image.
    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
    [self addFilter:blurFilter];

    // Stage two: blend the blurred copy back with the untouched original.
    tiltShiftFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageTiltShiftFragmentShaderString];
    [self addFilter:tiltShiftFilter];

    // The sharp image must occupy texture location 0 for both stages, so the
    // blurred result is wired into location 1 of the blend stage.
    [blurFilter addTarget:tiltShiftFilter atTextureLocation:1];

    // To prevent double updating of this filter, disable updates from the sharp image side
    //    self.inputFilterToIgnoreForUpdates = tiltShiftFilter;

    self.initialFilters = @[blurFilter, tiltShiftFilter];
    self.terminalFilter = tiltShiftFilter;

    self.topFocusLevel = 0.4;
    self.bottomFocusLevel = 0.6;
    self.focusFallOffRate = 0.2;
    self.blurRadiusInPixels = 7.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

// The blur radius is forwarded straight to the internal Gaussian blur stage.
- (void)setBlurRadiusInPixels:(CGFloat)value;
{
    [blurFilter setBlurRadiusInPixels:value];
}

- (CGFloat)blurRadiusInPixels;
{
    return [blurFilter blurRadiusInPixels];
}

// The focus parameters are pushed down as uniforms on the blend stage.
- (void)setTopFocusLevel:(CGFloat)value;
{
    _topFocusLevel = value;
    [tiltShiftFilter setFloat:value forUniformName:@"topFocusLevel"];
}

- (void)setBottomFocusLevel:(CGFloat)value;
{
    _bottomFocusLevel = value;
    [tiltShiftFilter setFloat:value forUniformName:@"bottomFocusLevel"];
}

- (void)setFocusFallOffRate:(CGFloat)value;
{
    _focusFallOffRate = value;
    [tiltShiftFilter setFloat:value forUniformName:@"focusFallOffRate"];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 935
|
```objective-c
#import "GPUImageSubtractBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant: output.rgb = texture1.rgb - texture2.rgb, alpha taken from texture 1.
NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);
 }
);
#else
// Desktop OpenGL variant; same math as above without precision qualifiers.
NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);

     gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);
 }
);
#endif
@implementation GPUImageSubtractBlendFilter

// Wires up the subtract-blend fragment shader defined above.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageSubtractBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 311
|
```objective-c
#import "GPUImageLocalBinaryPatternFilter.h"
// This is based on "Accelerating image recognition on mobile devices using GPGPU" by Miguel Bordallo Lopez, Henri Nykanen, Jari Hannuksela, Olli Silven and Markku Vehvilainen
// path_to_url~jhannuks/publications/SPIE2011a.pdf
// Right pixel is the most significant bit, traveling clockwise to get to the upper right, which is the least significant
// If the external pixel is greater than or equal to the center, set to 1, otherwise 0
//
// 2 1 0
// 3 7
// 4 5 6
// 01101101
// 76543210
@implementation GPUImageLocalBinaryPatternFilter

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant. Each of the 8 neighbors whose red channel is >= the center's
// contributes one bit; the weights (1..128)/255 pack the 8-bit pattern into a
// normalized [0,1] gray value written to all color channels.
NSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING
(
 precision highp float;

 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;

 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;

 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     lowp float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
     lowp float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
     lowp float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
     lowp float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
     lowp float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
     lowp float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
     lowp float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
     lowp float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
     lowp float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;

     lowp float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);
     byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);
     byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);
     byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);
     byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);
     byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);
     byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);
     byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);

     // TODO: Replace the above with a dot product and two vec4s
     // TODO: Apply step to a matrix, rather than individually

     gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);
 }
);
#else
// Desktop OpenGL variant; identical logic without precision qualifiers.
NSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;

 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;

 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;

     float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);
     byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);
     byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);
     byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);
     byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);
     byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);
     byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);
     byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);

     // TODO: Replace the above with a dot product and two vec4s
     // TODO: Apply step to a matrix, rather than individually

     gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);
 }
);
#endif
#pragma mark -
#pragma mark Initialization and teardown

// Sets up the filter with the local-binary-pattern fragment shader above.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLocalBinaryPatternFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,274
|
```objective-c
#import "GPUImageFilterGroup.h"
// Filter group that thresholds against the image's own average luminosity
// (pipeline details live in the .m, not shown here).
@interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup

// This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0.
@property(readwrite, nonatomic) CGFloat thresholdMultiplier;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 62
|
```objective-c
#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
// Vertex shader that precomputes a cross-shaped (center + 4-neighbor) set of
// texture coordinates for BOTH inputs, offset by one texel in each direction.
NSString *const kGPUImageTwoInputNearbyTexelSamplingVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 attribute vec4 inputTextureCoordinate2;

 uniform float texelWidth;
 uniform float texelHeight;

 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 bottomTextureCoordinate;

 varying vec2 textureCoordinate2;
 varying vec2 leftTextureCoordinate2;
 varying vec2 rightTextureCoordinate2;
 varying vec2 topTextureCoordinate2;
 varying vec2 bottomTextureCoordinate2;

 void main()
 {
     gl_Position = position;

     vec2 widthStep = vec2(texelWidth, 0.0);
     vec2 heightStep = vec2(0.0, texelHeight);

     textureCoordinate = inputTextureCoordinate.xy;
     leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
     rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
     topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
     bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;

     textureCoordinate2 = inputTextureCoordinate2.xy;
     leftTextureCoordinate2 = inputTextureCoordinate2.xy - widthStep;
     rightTextureCoordinate2 = inputTextureCoordinate2.xy + widthStep;
     topTextureCoordinate2 = inputTextureCoordinate2.xy - heightStep;
     bottomTextureCoordinate2 = inputTextureCoordinate2.xy + heightStep;
 }
);
@implementation GPUImageTwoInputCrossTextureSamplingFilter

@synthesize texelWidth = _texelWidth;
@synthesize texelHeight = _texelHeight;

#pragma mark -
#pragma mark Initialization and teardown

// Pairs the given fragment shader with the cross-sampling vertex shader above
// and caches the texel-size uniform handles.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    self = [super initWithVertexShaderFromString:kGPUImageTwoInputNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
    texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];

    return self;
}

// Derives the texel spacing from the frame size, unless the caller has pinned
// the spacing manually via the texelWidth/texelHeight setters below.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    if (hasOverriddenImageSizeFactor)
    {
        return;
    }

    _texelWidth = 1.0 / filterFrameSize.width;
    _texelHeight = 1.0 / filterFrameSize.height;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:filterProgram];

        GLfloat widthToUse = _texelWidth;
        GLfloat heightToUse = _texelHeight;
        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
        {
            // A rotated input swaps the horizontal and vertical texel steps.
            widthToUse = _texelHeight;
            heightToUse = _texelWidth;
        }

        glUniform1f(texelWidthUniform, widthToUse);
        glUniform1f(texelHeightUniform, heightToUse);
    });
}

#pragma mark -
#pragma mark Accessors

- (void)setTexelWidth:(CGFloat)newValue;
{
    hasOverriddenImageSizeFactor = YES;
    _texelWidth = newValue;

    [self setFloat:newValue forUniform:texelWidthUniform program:filterProgram];
}

- (void)setTexelHeight:(CGFloat)newValue;
{
    hasOverriddenImageSizeFactor = YES;
    _texelHeight = newValue;

    [self setFloat:newValue forUniform:texelHeightUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 744
|
```objective-c
#import "GPUImageFilter.h"
extern NSString *const kGPUImageColorAveragingVertexShaderString;
@interface GPUImageAverageColor : GPUImageFilter
{
    GLint texelWidthUniform, texelHeightUniform;

    NSUInteger numberOfStages; // number of reduction passes — set in the .m (not shown)

    GLubyte *rawImagePixels;   // presumably a CPU-side readback buffer for the final stage — confirm in the .m
    CGSize finalStageSize;
}

// This block is called on the completion of color averaging for a frame
@property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime);

- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAverageColor.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 136
|
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
// For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
// This extends out dark features, and is most commonly used with black-and-white thresholded images.
@interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter

// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)erosionRadius;

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageErosionFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 126
|
```objective-c
#import "GPUImageFilter.h"
// Pixellation applied in polar coordinates around `center` (shader lives in the .m, not shown).
@interface GPUImagePolarPixellateFilter : GPUImageFilter {
    GLint centerUniform, pixelSizeUniform;
}

// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
// The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05)
@property(readwrite, nonatomic) CGSize pixelSize;

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 121
|
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageGammaFilter : GPUImageFilter
{
    GLint gammaUniform; // uniform handle — presumably the shader's gamma uniform, bound in the .m (not shown)
}

// Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat gamma;

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGammaFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 61
|
```objective-c
#import "GPUImageHSBFilter.h"
@implementation GPUImageHSBFilter {
    // Accumulated 4x4 color transform, in the row-major layout used by the
    // C matrix helpers below. Each adjust*/rotate* call composes onto it.
    float matrix[4][4];
}

- (id)init
{
    self = [super init];
    if (self) {
        [self reset];
    }
    return self;
}

// Restores the identity transform (no hue/saturation/brightness change).
- (void)reset {
    identmat(matrix);
    [self _updateColorMatrix];
}

// Rotates the hue by h degrees (huerotatemat converts via M_PI/180) while
// preserving luminance.
- (void)rotateHue:(float)h {
    huerotatemat(matrix, h);
    [self _updateColorMatrix];
}

// Scales saturation: s = 1.0 leaves colors unchanged, s = 0.0 collapses to the
// luminance axis (grayscale); see saturatemat below.
- (void)adjustSaturation:(float)s {
    saturatemat(matrix, s);
    [self _updateColorMatrix];
}

// Scales all three color channels uniformly by b.
- (void)adjustBrightness:(float)b {
    cscalemat(matrix, b, b, b);
    [self _updateColorMatrix];
}

// Pushes the C matrix into the superclass's colorMatrix property. Note the
// transposition: GPUMatrix4x4 row N is filled from column N of the C matrix.
- (void)_updateColorMatrix {
    GPUMatrix4x4 gpuMatrix;
    gpuMatrix.one.one = matrix[0][0];
    gpuMatrix.one.two = matrix[1][0];
    gpuMatrix.one.three = matrix[2][0];
    gpuMatrix.one.four = matrix[3][0];
    gpuMatrix.two.one = matrix[0][1];
    gpuMatrix.two.two = matrix[1][1];
    gpuMatrix.two.three = matrix[2][1];
    gpuMatrix.two.four = matrix[3][1];
    gpuMatrix.three.one = matrix[0][2];
    gpuMatrix.three.two = matrix[1][2];
    gpuMatrix.three.three = matrix[2][2];
    gpuMatrix.three.four = matrix[3][2];
    gpuMatrix.four.one = matrix[0][3];
    gpuMatrix.four.two = matrix[1][3];
    gpuMatrix.four.three = matrix[2][3];
    gpuMatrix.four.four = matrix[3][3];
    self.colorMatrix = gpuMatrix;
}
#pragma mark - Matrix algorithms
/* Matrix algorithms adapted from path_to_url
Note about luminance vector values below from that page:
Where rwgt is 0.3086, gwgt is 0.6094, and bwgt is 0.0820. This is the luminance vector. Notice here that we do not use the standard NTSC weights of 0.299, 0.587, and 0.114. The NTSC weights are only applicable to RGB colors in a gamma 2.2 color space. For linear RGB colors the values above are better.
*/
//#define RLUM (0.3086f)
//#define GLUM (0.6094f)
//#define BLUM (0.0820f)
/* This is the vector value from the PDF specification, and may be closer to what Photoshop uses */
#define RLUM (0.3f)
#define GLUM (0.59f)
#define BLUM (0.11f)
/*
 * matrixmult -
 *	multiply two 4x4 matrices: c = b x a (row y of b against column x of a).
 *	Buffered through a temporary, so c may safely alias a or b.
 *	(Converted from obsolete K&R parameter declarations, removed in C23.)
 */
static void matrixmult(float a[4][4], float b[4][4], float c[4][4])
{
    int x, y;
    float temp[4][4];

    for (y = 0; y < 4; y++)
    {
        for (x = 0; x < 4; x++)
        {
            temp[y][x] = b[y][0] * a[0][x]
                       + b[y][1] * a[1][x]
                       + b[y][2] * a[2][x]
                       + b[y][3] * a[3][x];
        }
    }

    memcpy(c, temp, sizeof(temp));
}
/*
 * identmat -
 *	make an identity matrix
 *	(Converted from obsolete K&R parameter declarations, removed in C23.)
 */
static void identmat(float matrix[4][4])
{
    memset(matrix, 0, sizeof(float[4][4]));
    matrix[0][0] = 1.0f;
    matrix[1][1] = 1.0f;
    matrix[2][2] = 1.0f;
    matrix[3][3] = 1.0f;
}
/*
 * xformpnt -
 *	transform a 3D point (treated as a row vector) by the matrix, including
 *	the translation in row 3. Results are written through tx/ty/tz.
 *	(Converted from obsolete K&R parameter declarations, removed in C23.)
 */
static void xformpnt(float matrix[4][4], float x, float y, float z, float *tx, float *ty, float *tz)
{
    *tx = x*matrix[0][0] + y*matrix[1][0] + z*matrix[2][0] + matrix[3][0];
    *ty = x*matrix[0][1] + y*matrix[1][1] + z*matrix[2][1] + matrix[3][1];
    *tz = x*matrix[0][2] + y*matrix[1][2] + z*matrix[2][2] + matrix[3][2];
}
/*
 * cscalemat -
 *	make a color scale matrix (per-channel diagonal scale) and compose it
 *	onto mat. (Converted from obsolete K&R parameter declarations.)
 */
static void cscalemat(float mat[4][4], float rscale, float gscale, float bscale)
{
    float mmat[4][4] = {
        { rscale, 0.0f,   0.0f,   0.0f },
        { 0.0f,   gscale, 0.0f,   0.0f },
        { 0.0f,   0.0f,   bscale, 0.0f },
        { 0.0f,   0.0f,   0.0f,   1.0f },
    };

    matrixmult(mmat, mat, mat);
}
/*
 * saturatemat -
 *	make a saturation matrix and compose it onto mat. Each output channel
 *	is a blend of the luminance projection (weight 1-sat) and the original
 *	channel (weight sat): sat = 1 leaves mat unchanged, sat = 0 collapses
 *	to grayscale. (Converted from obsolete K&R parameter declarations.)
 */
static void saturatemat(float mat[4][4], float sat)
{
    float a, b, c, d, e, f, g, h, i;
    float rwgt, gwgt, bwgt;

    rwgt = RLUM;
    gwgt = GLUM;
    bwgt = BLUM;

    a = (1.0-sat)*rwgt + sat;
    b = (1.0-sat)*rwgt;
    c = (1.0-sat)*rwgt;
    d = (1.0-sat)*gwgt;
    e = (1.0-sat)*gwgt + sat;
    f = (1.0-sat)*gwgt;
    g = (1.0-sat)*bwgt;
    h = (1.0-sat)*bwgt;
    i = (1.0-sat)*bwgt + sat;

    {
        float mmat[4][4] = {
            { a,    b,    c,    0.0f },
            { d,    e,    f,    0.0f },
            { g,    h,    i,    0.0f },
            { 0.0f, 0.0f, 0.0f, 1.0f },
        };

        matrixmult(mmat, mat, mat);
    }
}
/*
 * xrotate -
 *	rotate about the x (red) axis; rs/rc are the sine/cosine of the angle.
 *	(Converted from obsolete K&R parameter declarations.)
 */
static void xrotatemat(float mat[4][4], float rs, float rc)
{
    float mmat[4][4] = {
        { 1.0f,  0.0f, 0.0f, 0.0f },
        { 0.0f,  rc,   rs,   0.0f },
        { 0.0f, -rs,   rc,   0.0f },
        { 0.0f,  0.0f, 0.0f, 1.0f },
    };

    matrixmult(mmat, mat, mat);
}
/*
 * yrotate -
 *	rotate about the y (green) axis; rs/rc are the sine/cosine of the angle.
 *	(Converted from obsolete K&R parameter declarations.)
 */
static void yrotatemat(float mat[4][4], float rs, float rc)
{
    float mmat[4][4] = {
        { rc,   0.0f, -rs,  0.0f },
        { 0.0f, 1.0f,  0.0f, 0.0f },
        { rs,   0.0f,  rc,   0.0f },
        { 0.0f, 0.0f,  0.0f, 1.0f },
    };

    matrixmult(mmat, mat, mat);
}
/*
 * zrotate -
 *	rotate about the z (blue) axis; rs/rc are the sine/cosine of the angle.
 *	(Converted from obsolete K&R parameter declarations.)
 */
static void zrotatemat(float mat[4][4], float rs, float rc)
{
    float mmat[4][4] = {
        {  rc,   rs,   0.0f, 0.0f },
        { -rs,   rc,   0.0f, 0.0f },
        {  0.0f, 0.0f, 1.0f, 0.0f },
        {  0.0f, 0.0f, 0.0f, 1.0f },
    };

    matrixmult(mmat, mat, mat);
}
/*
 * zshear -
 *	shear z using x and y (adds dx*x + dy*y into the z output).
 *	(Converted from obsolete K&R parameter declarations.)
 */
static void zshearmat(float mat[4][4], float dx, float dy)
{
    float mmat[4][4] = {
        { 1.0f, 0.0f, dx,   0.0f },
        { 0.0f, 1.0f, dy,   0.0f },
        { 0.0f, 0.0f, 1.0f, 0.0f },
        { 0.0f, 0.0f, 0.0f, 1.0f },
    };

    matrixmult(mmat, mat, mat);
}
/*
* simplehuerotatemat -
* simple hue rotation. This changes luminance
*/
//static void simplehuerotatemat(mat,rot)
//float mat[4][4];
//float rot;
//{
// float mag;
// float xrs, xrc;
// float yrs, yrc;
// float zrs, zrc;
//
// /* rotate the grey vector into positive Z */
// mag = sqrt(2.0);
// xrs = 1.0/mag;
// xrc = 1.0/mag;
// xrotatemat(mat,xrs,xrc);
//
// mag = sqrt(3.0);
// yrs = -1.0/mag;
// yrc = sqrt(2.0)/mag;
// yrotatemat(mat,yrs,yrc);
//
// /* rotate the hue */
// zrs = sin(rot*M_PI/180.0);
// zrc = cos(rot*M_PI/180.0);
// zrotatemat(mat,zrs,zrc);
//
// /* rotate the grey vector back into place */
// yrotatemat(mat,-yrs,yrc);
// xrotatemat(mat,-xrs,xrc);
//}
/*
 * huerotatemat -
 *	rotate the hue by rot degrees, while maintaining luminance.
 *	Aligns the grey axis with +Z, shears the luminance plane flat so the
 *	rotation cannot change luminance, rotates about Z, then undoes the
 *	shear and axis alignment. (Converted from obsolete K&R declarations.)
 */
static void huerotatemat(float mat[4][4], float rot)
{
    float mmat[4][4];
    float mag;
    float lx, ly, lz;
    float xrs, xrc;
    float yrs, yrc;
    float zrs, zrc;
    float zsx, zsy;

    identmat(mmat);

    /* rotate the grey vector into positive Z */
    mag = sqrt(2.0);
    xrs = 1.0/mag;
    xrc = 1.0/mag;
    xrotatemat(mmat, xrs, xrc);

    mag = sqrt(3.0);
    yrs = -1.0/mag;
    yrc = sqrt(2.0)/mag;
    yrotatemat(mmat, yrs, yrc);

    /* shear the space to make the luminance plane horizontal */
    xformpnt(mmat, RLUM, GLUM, BLUM, &lx, &ly, &lz);
    zsx = lx/lz;
    zsy = ly/lz;
    zshearmat(mmat, zsx, zsy);

    /* rotate the hue (rot is in degrees) */
    zrs = sin(rot*M_PI/180.0);
    zrc = cos(rot*M_PI/180.0);
    zrotatemat(mmat, zrs, zrc);

    /* unshear the space to put the luminance plane back */
    zshearmat(mmat, -zsx, -zsy);

    /* rotate the grey vector back into place */
    yrotatemat(mmat, -yrs, yrc);
    xrotatemat(mmat, -xrs, xrc);

    matrixmult(mmat, mat, mat);
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHSBFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 3,569
|
```objective-c
#import "GPUImageTwoInputFilter.h"
// Stock two-input vertex shader: passes the position through and forwards an
// independent texture coordinate for each of the two input textures.
NSString *const kGPUImageTwoInputTextureVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 attribute vec4 inputTextureCoordinate2;

 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;

 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
     textureCoordinate2 = inputTextureCoordinate2.xy;
 }
);
@implementation GPUImageTwoInputFilter

#pragma mark -
#pragma mark Initialization and teardown

// Convenience initializer: pairs the given fragment shader with the stock
// two-input vertex shader declared above.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [self initWithVertexShaderFromString:kGPUImageTwoInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    return self;
}

// Designated initializer: resets all two-input bookkeeping (rotation, frame
// arrival flags, timestamps), then looks up the attribute/uniform handles for
// the second texture on the video-processing queue.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    inputRotation2 = kGPUImageNoRotation;

    hasSetFirstTexture = NO;

    hasReceivedFirstFrame = NO;
    hasReceivedSecondFrame = NO;
    firstFrameWasVideo = NO;
    secondFrameWasVideo = NO;
    firstFrameCheckDisabled = NO;
    secondFrameCheckDisabled = NO;

    firstFrameTime = kCMTimeInvalid;
    secondFrameTime = kCMTimeInvalid;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        filterSecondTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate2"];

        filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader

        glEnableVertexAttribArray(filterSecondTextureCoordinateAttribute);
    });

    return self;
}

// Registers the second texture-coordinate attribute before program linking.
- (void)initializeAttributes;
{
    [super initializeAttributes];
    [filterProgram addAttribute:@"inputTextureCoordinate2"];
}

// Treat the first input as always present (e.g. when it is a still image).
- (void)disableFirstFrameCheck;
{
    firstFrameCheckDisabled = YES;
}

// Treat the second input as always present (e.g. when it is a still image).
- (void)disableSecondFrameCheck;
{
    secondFrameCheckDisabled = YES;
}

#pragma mark -
#pragma mark Rendering

// Draws both input framebuffers (texture units 2 and 3) into a freshly fetched
// output framebuffer, then releases the holds on both inputs.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        // Still release our locks so the input framebuffers can be recycled.
        [firstInputFramebuffer unlock];
        [secondInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        // Keep the output alive until the captured image has been read back.
        [outputFramebuffer lock];
    }

    [self setUniformsForProgramAtIndex:0];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform, 2);

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform2, 3);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    // The second input's coordinates are derived from its own rotation mode.
    glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [firstInputFramebuffer unlock];
    [secondInputFramebuffer unlock];
    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

#pragma mark -
#pragma mark GPUImageInput

// New sources attach at index 1 once the first texture slot is taken.
- (NSInteger)nextAvailableTextureIndex;
{
    if (hasSetFirstTexture)
    {
        return 1;
    }
    else
    {
        return 0;
    }
}

// Stores and locks the incoming framebuffer in the slot for its index.
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        firstInputFramebuffer = newInputFramebuffer;
        hasSetFirstTexture = YES;
        [firstInputFramebuffer lock];
    }
    else
    {
        secondInputFramebuffer = newInputFramebuffer;
        [secondInputFramebuffer lock];
    }
}

// Only the first input drives the output size; a zero size frees the first slot.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        [super setInputSize:newSize atIndex:textureIndex];

        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetFirstTexture = NO;
        }
    }
}

// Each input keeps its own rotation mode.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        inputRotation = newInputRotation;
    }
    else
    {
        inputRotation2 = newInputRotation;
    }
}

// Swaps width/height when the given input's rotation is a 90-degree family one.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    CGSize rotatedSize = sizeToRotate;

    GPUImageRotationMode rotationToCheck;
    if (textureIndex == 0)
    {
        rotationToCheck = inputRotation;
    }
    else
    {
        rotationToCheck = inputRotation2;
    }

    if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))
    {
        rotatedSize.width = sizeToRotate.height;
        rotatedSize.height = sizeToRotate.width;
    }

    return rotatedSize;
}

// Waits until a frame has arrived on both inputs (or the missing side's check
// is disabled, or one side is a still image opposite a movie) before rendering
// and forwarding exactly one combined frame; then re-arms for the next pair.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // You can set up infinite update loops, so this helps to short circuit them
    if (hasReceivedFirstFrame && hasReceivedSecondFrame)
    {
        return;
    }

    BOOL updatedMovieFrameOppositeStillImage = NO;

    if (textureIndex == 0)
    {
        hasReceivedFirstFrame = YES;
        firstFrameTime = frameTime;
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            // A valid time here with an indefinite time on the other side means
            // a movie frame arrived opposite a still image — render immediately.
            // NOTE(review): the unparenthesized `if CMTIME_IS_INDEFINITE(...)`
            // compiles only because the macro expands to a parenthesized
            // expression; explicit parentheses would be clearer.
            if CMTIME_IS_INDEFINITE(secondFrameTime)
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else
    {
        hasReceivedSecondFrame = YES;
        secondFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if CMTIME_IS_INDEFINITE(firstFrameTime)
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }

    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
    if ((hasReceivedFirstFrame && hasReceivedSecondFrame) || updatedMovieFrameOppositeStillImage)
    {
        CMTime passOnFrameTime = (!CMTIME_IS_INDEFINITE(firstFrameTime)) ? firstFrameTime : secondFrameTime;
        [super newFrameReadyAtTime:passOnFrameTime atIndex:0]; // Bugfix when trying to record: always use time from first input (unless indefinite, in which case use the second input)
        hasReceivedFirstFrame = NO;
        hasReceivedSecondFrame = NO;
    }
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,633
|
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input blend filter applying a source-over composite (per the class
// name); the fragment shader lives in the corresponding .m file.
@interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 26
|
```objective-c
#import "GPUImageColorBlendFilter.h"
/**
* Color blend mode based upon pseudo code from the PDF specification.
*/
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (iOS / simulator): precision qualifiers are required.
NSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
// Luminance using the 0.3/0.59/0.11 weights from the PDF blend-mode pseudo code.
highp float lum(lowp vec3 c) {
return dot(c, vec3(0.3, 0.59, 0.11));
}
// Pulls an out-of-range color back toward its luminance so it stays displayable.
lowp vec3 clipcolor(lowp vec3 c) {
highp float l = lum(c);
lowp float n = min(min(c.r, c.g), c.b);
lowp float x = max(max(c.r, c.g), c.b);
if (n < 0.0) {
c.r = l + ((c.r - l) * l) / (l - n);
c.g = l + ((c.g - l) * l) / (l - n);
c.b = l + ((c.b - l) * l) / (l - n);
}
if (x > 1.0) {
c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
}
return c;
}
// Replaces c's luminance with l, then clips the result into range.
lowp vec3 setlum(lowp vec3 c, highp float l) {
highp float d = l - lum(c);
c = c + vec3(d);
return clipcolor(c);
}
void main()
{
highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
// Color blend: overlay's color carried at the base's luminance, mixed by overlay alpha.
gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
}
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
float lum(vec3 c) {
return dot(c, vec3(0.3, 0.59, 0.11));
}
vec3 clipcolor(vec3 c) {
float l = lum(c);
float n = min(min(c.r, c.g), c.b);
float x = max(max(c.r, c.g), c.b);
if (n < 0.0) {
c.r = l + ((c.r - l) * l) / (l - n);
c.g = l + ((c.g - l) * l) / (l - n);
c.b = l + ((c.b - l) * l) / (l - n);
}
if (x > 1.0) {
c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
}
return c;
}
vec3 setlum(vec3 c, float l) {
float d = l - lum(c);
c = c + vec3(d);
return clipcolor(c);
}
void main()
{
vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
}
);
#endif
@implementation GPUImageColorBlendFilter

// Configures the two-input filter with the color-blend fragment shader.
// Returns nil if the superclass fails to build the shader program.
- (id)init;
{
    return [super initWithFragmentShaderFromString:kGPUImageColorBlendFragmentShaderString];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 963
|
```objective-c
#import "GPUImageOpacityFilter.h"
@implementation GPUImageOpacityFilter
@synthesize opacity = _opacity;
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (iOS / simulator): precision qualifiers are required.
NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float opacity;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
// Scales only the alpha channel; RGB passes through unchanged.
gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
}
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float opacity;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
// Builds the filter with the opacity fragment shader, caches the uniform
// handle, and defaults to a fully opaque pass-through (opacity = 1.0).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageOpacityFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    opacityUniform = [filterProgram uniformIndex:@"opacity"];
    self.opacity = 1.0;

    return self;
}
#pragma mark -
#pragma mark Accessors
// Stores the new opacity and immediately pushes it to the shader uniform.
- (void)setOpacity:(CGFloat)newValue;
{
    _opacity = newValue;
    [self setFloat:newValue forUniform:opacityUniform program:filterProgram];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 313
|
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageErosionFilter;
@class GPUImageDilationFilter;
// A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius.
// This helps to filter out smaller dark elements.
@interface GPUImageClosingFilter : GPUImageFilterGroup
{
GPUImageErosionFilter *erosionFilter;   // second stage: erodes the dilated image
GPUImageDilationFilter *dilationFilter; // first stage: dilation on the red channel
}
// Texel spacing used by both stages when sampling neighbors.
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
// Initializes both stages with the given sampling radius.
- (id)initWithRadius:(NSUInteger)radius;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageClosingFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 125
|
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input filter intended to consume a Voronoi map (exact sampling behavior
// is defined by the shader in the .m file, not visible here).
@interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter
{
GLint sizeUniform; // handle for the shader's size uniform
}
// Size in pixels — presumably forwarded to sizeUniform; confirm in the .m.
@property (nonatomic, readwrite) CGSize sizeInPixels;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 46
|
```objective-c
#import "GPUImageFramebufferCache.h"
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#else
#endif
@interface GPUImageFramebufferCache()
{
// NSCache *framebufferCache;
NSMutableDictionary *framebufferCache;
NSMutableDictionary *framebufferTypeCounts;
NSMutableArray *activeImageCaptureList; // Where framebuffers that may be lost by a filter, but which are still needed for a UIImage, etc., are stored
id memoryWarningObserver;
dispatch_queue_t framebufferCacheQueue;
}
- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
@end
@implementation GPUImageFramebufferCache
#pragma mark -
#pragma mark Initialization and teardown
// Sets up the cache's dictionaries, the active-capture list, and (on iOS) a
// memory-warning observer that flushes all unassigned framebuffers.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// NOTE(review): the observer block captures self strongly and the observer is
// never removed in this file — acceptable for a long-lived cache, but confirm
// teardown elsewhere if this object can be deallocated.
memoryWarningObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidReceiveMemoryWarningNotification object:nil queue:nil usingBlock:^(NSNotification *note) {
[self purgeAllUnassignedFramebuffers];
}];
#else
#endif
// framebufferCache = [[NSCache alloc] init];
framebufferCache = [[NSMutableDictionary alloc] init];
framebufferTypeCounts = [[NSMutableDictionary alloc] init];
activeImageCaptureList = [[NSMutableArray alloc] init];
// Queue is created but most operations below actually serialize on the shared
// video-processing queue instead (see the commented-out dispatch calls).
framebufferCacheQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.framebufferCacheQueue", NULL);
return self;
}
#pragma mark -
#pragma mark Framebuffer management
// Builds the cache lookup key for a framebuffer configuration: the size plus
// every texture option, with a "-NOFB" suffix for texture-only entries that
// carry no backing framebuffer.
- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
{
    NSString *baseHash = [NSString stringWithFormat:@"%.1fx%.1f-%d:%d:%d:%d:%d:%d:%d", size.width, size.height, textureOptions.minFilter, textureOptions.magFilter, textureOptions.wrapS, textureOptions.wrapT, textureOptions.internalFormat, textureOptions.format, textureOptions.type];

    if (onlyTexture)
    {
        return [baseHash stringByAppendingString:@"-NOFB"];
    }

    return baseHash;
}
// Returns a locked framebuffer matching the requested configuration, reusing
// a cached one when available and allocating a fresh one otherwise. All cache
// mutation happens synchronously on the shared video-processing queue.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
{
__block GPUImageFramebuffer *framebufferFromCache = nil;
// dispatch_sync(framebufferCacheQueue, ^{
runSynchronouslyOnVideoProcessingQueue(^{
NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];
NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];
if ([numberOfMatchingTexturesInCache integerValue] < 1)
{
// Nothing in the cache, create a new framebuffer to use
framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
}
else
{
// Something found, pull the old framebuffer and decrement the count
NSInteger currentTextureID = (numberOfMatchingTextures - 1);
// Scan downward from the highest slot; slots may be empty if entries were
// invalidated behind our back.
while ((framebufferFromCache == nil) && (currentTextureID >= 0))
{
NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)currentTextureID];
framebufferFromCache = [framebufferCache objectForKey:textureHash];
// Test the values in the cache first, to see if they got invalidated behind our back
if (framebufferFromCache != nil)
{
// Withdraw this from the cache while it's in use
[framebufferCache removeObjectForKey:textureHash];
}
currentTextureID--;
}
// The loop over-decrements by one whether or not it found a hit; restore
// the count of slots that remain below the one consumed (or scanned past).
currentTextureID++;
[framebufferTypeCounts setObject:[NSNumber numberWithInteger:currentTextureID] forKey:lookupHash];
if (framebufferFromCache == nil)
{
framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
}
}
});
// Caller owns one lock; it is released via returnFramebufferToCache: / unlock.
[framebufferFromCache lock];
return framebufferFromCache;
}
// Convenience fetch using the framework's default texture options:
// linear filtering, clamp-to-edge wrapping, 8-bit BGRA.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
{
    GPUTextureOptions defaultTextureOptions = {
        .minFilter = GL_LINEAR,
        .magFilter = GL_LINEAR,
        .wrapS = GL_CLAMP_TO_EDGE,
        .wrapT = GL_CLAMP_TO_EDGE,
        .internalFormat = GL_RGBA,
        .format = GL_BGRA,
        .type = GL_UNSIGNED_BYTE,
    };

    return [self fetchFramebufferForSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:onlyTexture];
}
// Returns a framebuffer to the pool: clears its reference locks, then (on the
// video-processing queue) stores it at the next free slot for its
// configuration hash and bumps that configuration's count.
- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
{
[framebuffer clearAllLocks];
// dispatch_async(framebufferCacheQueue, ^{
runAsynchronouslyOnVideoProcessingQueue(^{
CGSize framebufferSize = framebuffer.size;
GPUTextureOptions framebufferTextureOptions = framebuffer.textureOptions;
NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:framebufferTextureOptions onlyTexture:framebuffer.missingFramebuffer];
NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];
NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];
// Slot index equals the current count, i.e. the first unused slot.
NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)numberOfMatchingTextures];
// [framebufferCache setObject:framebuffer forKey:textureHash cost:round(framebufferSize.width * framebufferSize.height * 4.0)];
[framebufferCache setObject:framebuffer forKey:textureHash];
[framebufferTypeCounts setObject:[NSNumber numberWithInteger:(numberOfMatchingTextures + 1)] forKey:lookupHash];
});
}
// Drops every pooled framebuffer and resets the per-configuration counts;
// on iOS this also flushes the Core Video texture cache. Framebuffers that
// are currently checked out are unaffected.
- (void)purgeAllUnassignedFramebuffers;
{
runAsynchronouslyOnVideoProcessingQueue(^{
// dispatch_async(framebufferCacheQueue, ^{
[framebufferCache removeAllObjects];
[framebufferTypeCounts removeAllObjects];
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
CVOpenGLESTextureCacheFlush([[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], 0);
#else
#endif
});
}
// Retains a framebuffer in the active-capture list so it survives while a
// UIImage (or similar) still needs its contents.
- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
{
runAsynchronouslyOnVideoProcessingQueue(^{
// dispatch_async(framebufferCacheQueue, ^{
[activeImageCaptureList addObject:framebuffer];
});
}
// Releases a framebuffer from the active-capture list once its image data is
// no longer needed.
- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
{
runAsynchronouslyOnVideoProcessingQueue(^{
// dispatch_async(framebufferCacheQueue, ^{
[activeImageCaptureList removeObject:framebuffer];
});
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,501
|
```objective-c
#import "GPUImageLuminosityBlendFilter.h"
/**
* Luminosity blend mode based upon pseudo code from the PDF specification.
*/
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (iOS / simulator): precision qualifiers are required.
NSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
// Luminance using the 0.3/0.59/0.11 weights from the PDF blend-mode pseudo code.
highp float lum(lowp vec3 c) {
return dot(c, vec3(0.3, 0.59, 0.11));
}
// Pulls an out-of-range color back toward its luminance so it stays displayable.
lowp vec3 clipcolor(lowp vec3 c) {
highp float l = lum(c);
lowp float n = min(min(c.r, c.g), c.b);
lowp float x = max(max(c.r, c.g), c.b);
if (n < 0.0) {
c.r = l + ((c.r - l) * l) / (l - n);
c.g = l + ((c.g - l) * l) / (l - n);
c.b = l + ((c.b - l) * l) / (l - n);
}
if (x > 1.0) {
c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
}
return c;
}
// Replaces c's luminance with l, then clips the result into range.
lowp vec3 setlum(lowp vec3 c, highp float l) {
highp float d = l - lum(c);
c = c + vec3(d);
return clipcolor(c);
}
void main()
{
highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
// Luminosity blend: base's color carried at the overlay's luminance, mixed by overlay alpha.
gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a);
}
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
float lum(vec3 c) {
return dot(c, vec3(0.3, 0.59, 0.11));
}
vec3 clipcolor(vec3 c) {
float l = lum(c);
float n = min(min(c.r, c.g), c.b);
float x = max(max(c.r, c.g), c.b);
if (n < 0.0) {
c.r = l + ((c.r - l) * l) / (l - n);
c.g = l + ((c.g - l) * l) / (l - n);
c.b = l + ((c.b - l) * l) / (l - n);
}
if (x > 1.0) {
c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
}
return c;
}
vec3 setlum(vec3 c, float l) {
float d = l - lum(c);
c = c + vec3(d);
return clipcolor(c);
}
void main()
{
vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a);
}
);
#endif
@implementation GPUImageLuminosityBlendFilter

// Configures the two-input filter with the luminosity-blend fragment shader.
// Returns nil if the superclass fails to build the shader program.
- (id)init;
{
    return [super initWithFragmentShaderFromString:kGPUImageLuminosityBlendFragmentShaderString];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 978
|
```objective-c
#import "GPUImageAddBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES variant (iOS / simulator): precision qualifiers are required.
// Per-channel additive blend with alpha-aware clamping: channels add directly
// until the alpha-weighted sum saturates, then switch to a composited form.
NSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
mediump float r;
if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) {
r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
} else {
r = overlay.r + base.r;
}
mediump float g;
if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) {
g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
} else {
g = overlay.g + base.g;
}
mediump float b;
if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) {
b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
} else {
b = overlay.b + base.b;
}
// Standard "over" alpha combination.
mediump float a = overlay.a + base.a - overlay.a * base.a;
gl_FragColor = vec4(r, g, b, a);
}
);
#else
// Desktop OpenGL variant: identical logic without precision qualifiers.
NSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 base = texture2D(inputImageTexture, textureCoordinate);
vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
float r;
if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) {
r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
} else {
r = overlay.r + base.r;
}
float g;
if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) {
g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
} else {
g = overlay.g + base.g;
}
float b;
if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) {
b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
} else {
b = overlay.b + base.b;
}
float a = overlay.a + base.a - overlay.a * base.a;
gl_FragColor = vec4(r, g, b, a);
}
);
#endif
@implementation GPUImageAddBlendFilter

// Configures the two-input filter with the additive-blend fragment shader.
// Returns nil if the superclass fails to build the shader program.
- (id)init;
{
    return [super initWithFragmentShaderFromString:kGPUImageAddBlendFragmentShaderString];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 773
|
```objective-c
// 2448x3264 pixel image = 31,961,088 bytes for uncompressed RGBA
#import "GPUImageStillCamera.h"
// CVPixelBufferCreateWithBytes release callback: frees the pixel data that
// GPUImageCreateResizedSampleBuffer allocated with calloc().
void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress)
{
free((void *)baseAddress);
}
// Redraws the contents of cameraFrame at finalSize via Core Graphics and wraps
// the result in a new CMSampleBuffer (returned through *sampleBuffer, which the
// caller must CFRelease). Assumes a non-planar 32-bit BGRA input frame —
// planar/YUV input is rejected by the caller before reaching here.
void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer)
{
// CVPixelBufferCreateWithPlanarBytes for YUV input
CGSize originalSize = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));
CVPixelBufferLockBaseAddress(cameraFrame, 0);
GLubyte *sourceImageBytes =  CVPixelBufferGetBaseAddress(cameraFrame);
// Wrap the locked pixel data in a CGImage without copying it.
CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, sourceImageBytes, CVPixelBufferGetBytesPerRow(cameraFrame) * originalSize.height, NULL);
CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef cgImageFromBytes = CGImageCreate((int)originalSize.width, (int)originalSize.height, 8, 32, CVPixelBufferGetBytesPerRow(cameraFrame), genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
// Destination buffer: ownership transfers to the pixel buffer below, which
// frees it via stillImageDataReleaseCallback.
GLubyte *imageData = (GLubyte *) calloc(1, (int)finalSize.width * (int)finalSize.height * 4);
CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)finalSize.width, (int)finalSize.height, 8, (int)finalSize.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, finalSize.width, finalSize.height), cgImageFromBytes);
CGImageRelease(cgImageFromBytes);
CGContextRelease(imageContext);
CGColorSpaceRelease(genericRGBColorspace);
CGDataProviderRelease(dataProvider);
CVPixelBufferRef pixel_buffer = NULL;
CVPixelBufferCreateWithBytes(kCFAllocatorDefault, finalSize.width, finalSize.height, kCVPixelFormatType_32BGRA, imageData, finalSize.width * 4, stillImageDataReleaseCallback, NULL, NULL, &pixel_buffer);
CMVideoFormatDescriptionRef videoInfo = NULL;
CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixel_buffer, &videoInfo);
// Nominal 30fps timing; the actual timestamp is not meaningful for a still.
CMTime frameTime = CMTimeMake(1, 30);
CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixel_buffer, YES, NULL, NULL, videoInfo, &timing, sampleBuffer);
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
CFRelease(videoInfo);
CVPixelBufferRelease(pixel_buffer);
}
// Private still-capture state.
@interface GPUImageStillCamera ()
{
AVCaptureStillImageOutput *photoOutput;
}
// Methods calling this are responsible for calling dispatch_semaphore_signal(frameRenderingSemaphore) somewhere inside the block
- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block;
@end
@implementation GPUImageStillCamera {
// YES on iOS versions below 6.0; set in initWithSessionPreset:cameraPosition:.
BOOL requiresFrontCameraTextureCacheCorruptionWorkaround;
}
@synthesize currentCaptureMetadata = _currentCaptureMetadata;
@synthesize jpegCompressionQuality = _jpegCompressionQuality;
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer: configures the capture session with a still-image
// output, choosing YUV output (full range preferred) when the device supports
// it and falling back to BGRA otherwise. Defaults JPEG quality to 0.8.
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
if (!(self = [super initWithSessionPreset:sessionPreset cameraPosition:cameraPosition]))
{
return nil;
}
/* Detect iOS version < 6 which require a texture cache corruption workaround */
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
requiresFrontCameraTextureCacheCorruptionWorkaround = [[[UIDevice currentDevice] systemVersion] compare:@"6.0" options:NSNumericSearch] == NSOrderedAscending;
#pragma clang diagnostic pop
[self.captureSession beginConfiguration];
photoOutput = [[AVCaptureStillImageOutput alloc] init];
// Having a still photo input set to BGRA and video to YUV doesn't work well, so since I don't have YUV resizing for iPhone 4 yet, kick back to BGRA for that device
// if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
{
BOOL supportsFullYUVRange = NO;
NSArray *supportedPixelFormats = photoOutput.availableImageDataCVPixelFormatTypes;
for (NSNumber *currentPixelFormat in supportedPixelFormats)
{
if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
{
supportsFullYUVRange = YES;
}
}
if (supportsFullYUVRange)
{
[photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
}
else
{
[photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
}
}
else
{
// Keep the still and video outputs in the same pixel format (BGRA).
captureAsYUV = NO;
[photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
}
[self.captureSession addOutput:photoOutput];
[self.captureSession commitConfiguration];
self.jpegCompressionQuality = 0.8;
return self;
}
// Convenience initializer: photo-quality session preset with the back camera.
- (id)init;
{
    return [self initWithSessionPreset:AVCaptureSessionPresetPhoto cameraPosition:AVCaptureDevicePositionBack];
}
// Detaches the still-photo output before letting the superclass tear down the
// remaining session inputs/outputs.
- (void)removeInputsAndOutputs;
{
[self.captureSession removeOutput:photoOutput];
[super removeInputsAndOutputs];
}
#pragma mark -
#pragma mark Photography controls
// Deliberately disabled: raw sample-buffer capture conflicts with the pixel
// format configured in initWithSessionPreset:cameraPosition:. The log message
// explains how to re-enable the commented-out implementation below.
- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block
{
NSLog(@"If you want to use the method capturePhotoAsSampleBufferWithCompletionHandler:, you must comment out the line in GPUImageStillCamera.m in the method initWithSessionPreset:cameraPosition: which sets the CVPixelBufferPixelFormatTypeKey, as well as uncomment the rest of the method capturePhotoAsSampleBufferWithCompletionHandler:. However, if you do this you cannot use any of the photo capture methods to take a photo if you also supply a filter.");
/*dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
block(imageSampleBuffer, error);
}];
dispatch_semaphore_signal(frameRenderingSemaphore);
*/
return;
}
// Captures a still photo, runs it through the filter chain ending at
// finalFilterInChain, and hands the filtered UIImage to the completion block
// (nil image on error).
- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
{
    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
        UIImage *processedImage = (error == nil) ? [finalFilterInChain imageFromCurrentFramebuffer] : nil;

        // Release the rendering semaphore before handing control to the caller.
        dispatch_semaphore_signal(frameRenderingSemaphore);
        block(processedImage, error);
    }];
}
// Orientation-aware variant of the UIImage capture above: the returned image
// is tagged with the supplied UIImageOrientation.
- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block {
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
UIImage *filteredPhoto = nil;
if(!error) {
filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];
}
// Release the rendering semaphore before handing control to the caller.
dispatch_semaphore_signal(frameRenderingSemaphore);
block(filteredPhoto, error);
}];
}
// Captures a still photo, filters it, and delivers JPEG data encoded at
// self.jpegCompressionQuality (nil data on error). The autorelease pool keeps
// the intermediate UIImage from lingering during encoding.
- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
{
// reportAvailableMemoryForGPUImage(@"Before Capture");
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
NSData *dataForJPEGFile = nil;
if(!error){
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];
// Signal as soon as the GPU-side read is done; JPEG encoding is CPU work.
dispatch_semaphore_signal(frameRenderingSemaphore);
// reportAvailableMemoryForGPUImage(@"After UIImage generation");
dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto,self.jpegCompressionQuality);
// reportAvailableMemoryForGPUImage(@"After JPEG generation");
}
// reportAvailableMemoryForGPUImage(@"After autorelease pool");
}else{
dispatch_semaphore_signal(frameRenderingSemaphore);
}
block(dataForJPEGFile, error);
}];
}
// Orientation-aware variant of the JPEG capture above.
- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedImage, NSError *error))block {
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
NSData *dataForJPEGFile = nil;
if(!error) {
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];
// Signal as soon as the GPU-side read is done; JPEG encoding is CPU work.
dispatch_semaphore_signal(frameRenderingSemaphore);
dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto, self.jpegCompressionQuality);
}
} else {
dispatch_semaphore_signal(frameRenderingSemaphore);
}
block(dataForJPEGFile, error);
}];
}
// Captures a still photo, filters it, and delivers PNG-encoded data
// (nil data on error).
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
{
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
NSData *dataForPNGFile = nil;
if(!error){
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];
// Signal as soon as the GPU-side read is done; PNG encoding is CPU work.
dispatch_semaphore_signal(frameRenderingSemaphore);
dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);
}
}else{
dispatch_semaphore_signal(frameRenderingSemaphore);
}
block(dataForPNGFile, error);
}];
return;
}
// Orientation-aware variant of the PNG capture above.
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
{
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
NSData *dataForPNGFile = nil;
if(!error){
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];
// Signal as soon as the GPU-side read is done; PNG encoding is CPU work.
dispatch_semaphore_signal(frameRenderingSemaphore);
dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);
}
}else{
dispatch_semaphore_signal(frameRenderingSemaphore);
}
block(dataForPNGFile, error);
}];
return;
}
#pragma mark - Private Methods
// Core capture path shared by every public capture method: takes a still image
// from photoOutput, pushes it through the camera's normal frame pipeline (after
// downsampling frames larger than the GPU's maximum texture size), captures the
// capture metadata, and invokes the handler while the filtered result is still
// resident on the GPU. The handler (or a failure path here) is responsible for
// signaling frameRenderingSemaphore, which this method waits on first.
- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block
{
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
if(photoOutput.isCapturingStillImage){
block([NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorMaximumStillImageCaptureRequestsExceeded userInfo:nil]);
return;
}
[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if(imageSampleBuffer == NULL){
block(error);
return;
}
// For now, resize photos to fit within the max texture size of the GPU
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);
CGSize sizeOfPhoto = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));
CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:sizeOfPhoto];
if (!CGSizeEqualToSize(sizeOfPhoto, scaledImageSizeToFitOnGPU))
{
CMSampleBufferRef sampleBuffer = NULL;
if (CVPixelBufferGetPlaneCount(cameraFrame) > 0)
{
NSAssert(NO, @"Error: no downsampling for YUV input in the framework yet");
}
else
{
GPUImageCreateResizedSampleBuffer(cameraFrame, scaledImageSizeToFitOnGPU, &sampleBuffer);
}
// Release the semaphore so the frame-processing path below can take it.
dispatch_semaphore_signal(frameRenderingSemaphore);
[finalFilterInChain useNextFrameForImageCapture];
[self captureOutput:photoOutput didOutputSampleBuffer:sampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
if (sampleBuffer != NULL)
CFRelease(sampleBuffer);
}
else
{
// This is a workaround for the corrupt images that are sometimes returned when taking a photo with the front camera and using the iOS 5.0 texture caches
AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
if ( (currentCameraPosition != AVCaptureDevicePositionFront) || (![GPUImageContext supportsFastTextureUpload]) || !requiresFrontCameraTextureCacheCorruptionWorkaround)
{
dispatch_semaphore_signal(frameRenderingSemaphore);
[finalFilterInChain useNextFrameForImageCapture];
[self captureOutput:photoOutput didOutputSampleBuffer:imageSampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
}
}
// Expose EXIF/attachment metadata to the handler for the duration of the call.
CFDictionaryRef metadata = CMCopyDictionaryOfAttachments(NULL, imageSampleBuffer, kCMAttachmentMode_ShouldPropagate);
_currentCaptureMetadata = (__bridge_transfer NSDictionary *)metadata;
block(nil);
_currentCaptureMetadata = nil;
}];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageStillCamera.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 3,194
|
```objective-c
#import "GPUImageBrightnessFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: adds a constant brightness offset to each pixel's
// RGB channels, leaving alpha untouched.
// (Comments must stay outside SHADER_STRING — its argument is stringified.)
NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float brightness;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
}
);
#else
// Desktop OpenGL variant: identical math, without the ES precision qualifiers.
NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float brightness;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
}
);
#endif
@implementation GPUImageBrightnessFilter

@synthesize brightness = _brightness;

#pragma mark - Initialization and teardown

// Builds the filter around the brightness fragment shader and starts out neutral:
// a brightness offset of 0.0 leaves the image unchanged.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageBrightnessFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    brightnessUniform = [filterProgram uniformIndex:@"brightness"];
    self.brightness = 0.0;

    return self;
}

#pragma mark - Accessors

// Stores the new offset and pushes it straight through to the shader uniform.
- (void)setBrightness:(CGFloat)newValue;
{
    _brightness = newValue;
    [self setFloat:newValue forUniform:brightnessUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 322
|
```objective-c
#import "GPUImageBoxBlurFilter.h"
@implementation GPUImageBoxBlurFilter

// Builds a vertex shader that precomputes blur sample coordinates as varyings.
// Symmetric taps are paired, so numberOfOptimizedOffsets offsets (capped at 7 to
// stay within varying limits) cover the whole radius. sigma is unused for a box
// blur, where all taps carry equal weight; it exists to mirror the Gaussian API.
+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
if (blurRadius < 1)
{
return kGPUImageVertexShaderString;
}
// From these weights we calculate the offsets to read interpolated values from
NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
NSMutableString *shaderString = [[NSMutableString alloc] init];
// Header
[shaderString appendFormat:@"\
attribute vec4 position;\n\
attribute vec4 inputTextureCoordinate;\n\
\n\
uniform float texelWidthOffset;\n\
uniform float texelHeightOffset;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
gl_Position = position;\n\
\n\
vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];
// Inner offset loop
[shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"];
for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
{
GLfloat optimizedOffset = (GLfloat)(currentOptimizedOffset * 2) + 1.5;
[shaderString appendFormat:@"\
blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n\
blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedOffset, (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedOffset];
}
// Footer
[shaderString appendString:@"}\n"];
return shaderString;
}

// Builds the matching fragment shader: an equal-weight average over
// (2 * blurRadius + 1) taps. Taps beyond the varying budget fall back to
// dependent texture reads below. sigma is unused for a box blur (see above).
+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
if (blurRadius < 1)
{
return kGPUImagePassthroughFragmentShaderString;
}
NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);
NSMutableString *shaderString = [[NSMutableString alloc] init];
// Header
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
uniform highp float texelWidthOffset;\n\
uniform highp float texelHeightOffset;\n\
\n\
varying highp vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
lowp vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
#else
// Fixed: the %lu specifier needs an unsigned long argument; NSUInteger is 32-bit
// on some platforms, so the cast (used everywhere else in this file) was missing here.
[shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
uniform float texelWidthOffset;\n\
uniform float texelHeightOffset;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
#endif
GLfloat boxWeight = 1.0 / (GLfloat)((blurRadius * 2) + 1);
// Inner texture loop
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\n", boxWeight];
for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)
{
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), boxWeight * 2.0];
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), boxWeight * 2.0];
}
// If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader
if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
#else
[shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
#endif
for (NSUInteger currentOverflowTextureRead = numberOfOptimizedOffsets; currentOverflowTextureRead < trueNumberOfOptimizedOffsets; currentOverflowTextureRead++)
{
GLfloat optimizedOffset = (GLfloat)(currentOverflowTextureRead * 2) + 1.5;
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\n", optimizedOffset, boxWeight * 2.0];
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\n", optimizedOffset, boxWeight * 2.0];
}
}
// Footer
[shaderString appendString:@"\
gl_FragColor = sum;\n\
}\n"];
return shaderString;
}

- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
[super setupFilterForSize:filterFrameSize];
if (shouldResizeBlurRadiusWithImageSize == YES)
{
// Intentionally empty: radius rescaling on size change is not implemented yet.
}
}
#pragma mark -
#pragma mark Initialization and teardown
// Starts with a 4-pixel blur radius using the optimized two-pass shaders above.
- (id)init;
{
NSString *currentBoxBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:0.0];
NSString *currentBoxBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:0.0];
if (!(self = [super initWithFirstStageVertexShaderFromString:currentBoxBlurVertexShader firstStageFragmentShaderFromString:currentBoxBlurFragmentShader secondStageVertexShaderFromString:currentBoxBlurVertexShader secondStageFragmentShaderFromString:currentBoxBlurFragmentShader]))
{
return nil;
}
_blurRadiusInPixels = 4.0;
return self;
}
#pragma mark -
#pragma mark Accessors
// Rounds the requested radius to the nearest even value and rebuilds both shader
// stages, but only when the effective radius actually changes.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
CGFloat newBlurRadius = round(round(newValue / 2.0) * 2.0); // For now, only do even radii
if (newBlurRadius != _blurRadiusInPixels)
{
_blurRadiusInPixels = newBlurRadius;
NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0];
NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0];
[self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader];
}
shouldResizeBlurRadiusWithImageSize = NO;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,771
|
```objective-c
#import "GPUImageFilterGroup.h"
#import "GPUImageBuffer.h"
#import "GPUImageDissolveBlendFilter.h"
// Applies a temporal low-pass filter to incoming video: each frame is
// dissolve-blended with a buffered copy of the previous output, smoothing
// short-lived frame-to-frame noise.
@interface GPUImageLowPassFilter : GPUImageFilterGroup
{
GPUImageBuffer *bufferFilter; // holds the previous output frame for re-blending
GPUImageDissolveBlendFilter *dissolveBlendFilter; // mixes the current frame with the buffered one
}
// This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
@property(readwrite, nonatomic) CGFloat filterStrength;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 116
|
```objective-c
#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
@implementation GPUImageDirectionalNonMaximumSuppressionFilter
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING
(
precision mediump float;
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float texelWidth;
uniform highp float texelHeight;
uniform mediump float upperThreshold;
uniform mediump float lowerThreshold;
void main()
{
vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb;
vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight);
float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r;
float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r;
float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r);
multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r);
float thresholdCompliance = smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r);
multiplier = multiplier * thresholdCompliance;
gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0);
}
);
#else
NSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float texelWidth;
uniform float texelHeight;
uniform float upperThreshold;
uniform float lowerThreshold;
void main()
{
vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb;
vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight);
float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r;
float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r;
float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r);
multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r);
float thresholdCompliance = smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r);
multiplier = multiplier * thresholdCompliance;
gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0);
}
);
#endif
@synthesize texelWidth = _texelWidth;
@synthesize texelHeight = _texelHeight;
@synthesize upperThreshold = _upperThreshold;
@synthesize lowerThreshold = _lowerThreshold;
#pragma mark -
#pragma mark Initialization and teardown
// Caches the shader uniform locations and applies the default hysteresis
// thresholds (upper 0.5, lower 0.1).
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString]))
{
return nil;
}
texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
upperThresholdUniform = [filterProgram uniformIndex:@"upperThreshold"];
lowerThresholdUniform = [filterProgram uniformIndex:@"lowerThreshold"];
self.upperThreshold = 0.5;
self.lowerThreshold = 0.1;
return self;
}
// Derives texel dimensions from the incoming frame size, unless the caller has
// already set them explicitly through the accessors below.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
if (!hasOverriddenImageSizeFactor)
{
_texelWidth = 1.0 / filterFrameSize.width;
_texelHeight = 1.0 / filterFrameSize.height;
// Uniforms are written directly here, synchronously on the video processing
// queue, so the shader program must be made active first.
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext setActiveShaderProgram:filterProgram];
glUniform1f(texelWidthUniform, _texelWidth);
glUniform1f(texelHeightUniform, _texelHeight);
});
}
}
#pragma mark -
#pragma mark Accessors
// Setting either texel dimension by hand disables the automatic size-derived values.
- (void)setTexelWidth:(CGFloat)newValue;
{
hasOverriddenImageSizeFactor = YES;
_texelWidth = newValue;
[self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];
}
- (void)setTexelHeight:(CGFloat)newValue;
{
hasOverriddenImageSizeFactor = YES;
_texelHeight = newValue;
[self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];
}
// Gradient magnitudes below this value are always suppressed.
- (void)setLowerThreshold:(CGFloat)newValue;
{
_lowerThreshold = newValue;
[self setFloat:_lowerThreshold forUniform:lowerThresholdUniform program:filterProgram];
}
// Gradient magnitudes above this value always survive suppression.
- (void)setUpperThreshold:(CGFloat)newValue;
{
_upperThreshold = newValue;
[self setFloat:_upperThreshold forUniform:upperThresholdUniform program:filterProgram];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,055
|
```objective-c
#import "GPUImageLevelsFilter.h"
/*
** Gamma correction: raises each channel to the power 1/gamma, the midtone
** adjustment of a Photoshop-style Levels control.
*/
#define GammaCorrection(color, gamma) pow(color, 1.0 / gamma)
/*
** Levels control (input (+gamma), output): remaps [minInput, maxInput] to
** [0, 1] with clamping, applies the gamma correction above, then maps the
** result into the [minOutput, maxOutput] range.
*/
#define LevelsControlInputRange(color, minInput, maxInput) min(max(color - minInput, vec3(0.0)) / (maxInput - minInput), vec3(1.0))
#define LevelsControlInput(color, minInput, gamma, maxInput) GammaCorrection(LevelsControlInputRange(color, minInput, maxInput), gamma)
#define LevelsControlOutputRange(color, minOutput, maxOutput) mix(minOutput, maxOutput, color)
#define LevelsControl(color, minInput, gamma, maxInput, minOutput, maxOutput) LevelsControlOutputRange(LevelsControlInput(color, minInput, gamma, maxInput), minOutput, maxOutput)
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform mediump vec3 levelMinimum;
uniform mediump vec3 levelMiddle;
uniform mediump vec3 levelMaximum;
uniform mediump vec3 minOutput;
uniform mediump vec3 maxOutput;
void main()
{
mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a);
}
);
#else
NSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec3 levelMinimum;
uniform vec3 levelMiddle;
uniform vec3 levelMaximum;
uniform vec3 minOutput;
uniform vec3 maxOutput;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a);
}
);
#endif
@implementation GPUImageLevelsFilter

#pragma mark -
#pragma mark Initialization and teardown

// Looks up the five vec3 uniforms used by the levels shader and initializes every
// channel to an identity adjustment (full input range, gamma 1.0, full output range).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLevelsFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    minUniform = [filterProgram uniformIndex:@"levelMinimum"];
    midUniform = [filterProgram uniformIndex:@"levelMiddle"];
    maxUniform = [filterProgram uniformIndex:@"levelMaximum"];
    minOutputUniform = [filterProgram uniformIndex:@"minOutput"];
    maxOutputUniform = [filterProgram uniformIndex:@"maxOutput"];

    [self setRedMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];
    [self setGreenMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];
    [self setBlueMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];

    return self;
}

#pragma mark -
#pragma mark Helpers

// Pushes all five per-channel vectors down to their shader uniforms in one pass.
- (void)updateUniforms {
    [self setVec3:minVector forUniform:minUniform program:filterProgram];
    [self setVec3:midVector forUniform:midUniform program:filterProgram];
    [self setVec3:maxVector forUniform:maxUniform program:filterProgram];
    [self setVec3:minOutputVector forUniform:minOutputUniform program:filterProgram];
    [self setVec3:maxOutputVector forUniform:maxOutputUniform program:filterProgram];
}

#pragma mark -
#pragma mark Accessors

// Applies one levels adjustment uniformly across the red, green, and blue channels.
- (void)setMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax minOut:(CGFloat)newMinOut maxOut:(CGFloat)newMaxOut {
    [self setRedMin:newMin gamma:newGamma max:newMax minOut:newMinOut maxOut:newMaxOut];
    [self setGreenMin:newMin gamma:newGamma max:newMax minOut:newMinOut maxOut:newMaxOut];
    [self setBlueMin:newMin gamma:newGamma max:newMax minOut:newMinOut maxOut:newMaxOut];
}

// Convenience form that keeps the output range at [0, 1].
- (void)setMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax {
    [self setMin:newMin gamma:newGamma max:newMax minOut:0.0 maxOut:1.0];
}

// The red channel lives in the .one component of each vector.
- (void)setRedMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax minOut:(CGFloat)newMinOut maxOut:(CGFloat)newMaxOut {
    minVector.one = newMin;
    midVector.one = newGamma;
    maxVector.one = newMax;
    minOutputVector.one = newMinOut;
    maxOutputVector.one = newMaxOut;
    [self updateUniforms];
}

- (void)setRedMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax {
    [self setRedMin:newMin gamma:newGamma max:newMax minOut:0.0 maxOut:1.0];
}

// The green channel lives in the .two component of each vector.
- (void)setGreenMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax minOut:(CGFloat)newMinOut maxOut:(CGFloat)newMaxOut {
    minVector.two = newMin;
    midVector.two = newGamma;
    maxVector.two = newMax;
    minOutputVector.two = newMinOut;
    maxOutputVector.two = newMaxOut;
    [self updateUniforms];
}

- (void)setGreenMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax {
    [self setGreenMin:newMin gamma:newGamma max:newMax minOut:0.0 maxOut:1.0];
}

// The blue channel lives in the .three component of each vector.
- (void)setBlueMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax minOut:(CGFloat)newMinOut maxOut:(CGFloat)newMaxOut {
    minVector.three = newMin;
    midVector.three = newGamma;
    maxVector.three = newMax;
    minOutputVector.three = newMinOut;
    maxOutputVector.three = newMaxOut;
    [self updateUniforms];
}

- (void)setBlueMin:(CGFloat)newMin gamma:(CGFloat)newGamma max:(CGFloat)newMax {
    [self setBlueMin:newMin gamma:newGamma max:newMax minOut:0.0 maxOut:1.0];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,350
|
```objective-c
#import "GPUImageCannyEdgeDetectionFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
#import "GPUImageWeakPixelInclusionFilter.h"
#import "GPUImageSingleComponentGaussianBlurFilter.h"
@implementation GPUImageCannyEdgeDetectionFilter
@synthesize upperThreshold;
@synthesize lowerThreshold;
@synthesize blurRadiusInPixels;
@synthesize blurTexelSpacingMultiplier;
@synthesize texelWidth;
@synthesize texelHeight;
// Assembles the five-stage Canny pipeline (grayscale -> Gaussian blur ->
// directional Sobel -> non-maximum suppression -> weak-pixel inclusion).
// The addTarget: wiring below defines the processing order, so keep the
// construction sequence and the wiring in sync.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
// First pass: convert image to luminance
luminanceFilter = [[GPUImageGrayscaleFilter alloc] init];
[self addFilter:luminanceFilter];
// Second pass: apply a variable Gaussian blur
blurFilter = [[GPUImageSingleComponentGaussianBlurFilter alloc] init];
[self addFilter:blurFilter];
// Third pass: run the Sobel edge detection, with calculated gradient directions, on this blurred image
edgeDetectionFilter = [[GPUImageDirectionalSobelEdgeDetectionFilter alloc] init];
[self addFilter:edgeDetectionFilter];
// Fourth pass: apply non-maximum suppression
nonMaximumSuppressionFilter = [[GPUImageDirectionalNonMaximumSuppressionFilter alloc] init];
[self addFilter:nonMaximumSuppressionFilter];
// Fifth pass: include weak pixels to complete edges
weakPixelInclusionFilter = [[GPUImageWeakPixelInclusionFilter alloc] init];
[self addFilter:weakPixelInclusionFilter];
[luminanceFilter addTarget:blurFilter];
[blurFilter addTarget:edgeDetectionFilter];
[edgeDetectionFilter addTarget:nonMaximumSuppressionFilter];
[nonMaximumSuppressionFilter addTarget:weakPixelInclusionFilter];
self.initialFilters = [NSArray arrayWithObject:luminanceFilter];
// self.terminalFilter = nonMaximumSuppressionFilter;
self.terminalFilter = weakPixelInclusionFilter;
self.blurRadiusInPixels = 2.0;
self.blurTexelSpacingMultiplier = 1.0;
self.upperThreshold = 0.4;
self.lowerThreshold = 0.1;
return self;
}
#pragma mark -
#pragma mark Accessors
// Every accessor below forwards to whichever pipeline stage owns the setting;
// this group class keeps no duplicate state of its own.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
blurFilter.blurRadiusInPixels = newValue;
}
- (CGFloat)blurRadiusInPixels;
{
return blurFilter.blurRadiusInPixels;
}
- (void)setBlurTexelSpacingMultiplier:(CGFloat)newValue;
{
blurFilter.texelSpacingMultiplier = newValue;
}
- (CGFloat)blurTexelSpacingMultiplier;
{
return blurFilter.texelSpacingMultiplier;
}
- (void)setTexelWidth:(CGFloat)newValue;
{
edgeDetectionFilter.texelWidth = newValue;
}
- (CGFloat)texelWidth;
{
return edgeDetectionFilter.texelWidth;
}
- (void)setTexelHeight:(CGFloat)newValue;
{
edgeDetectionFilter.texelHeight = newValue;
}
- (CGFloat)texelHeight;
{
return edgeDetectionFilter.texelHeight;
}
- (void)setUpperThreshold:(CGFloat)newValue;
{
nonMaximumSuppressionFilter.upperThreshold = newValue;
}
- (CGFloat)upperThreshold;
{
return nonMaximumSuppressionFilter.upperThreshold;
}
- (void)setLowerThreshold:(CGFloat)newValue;
{
nonMaximumSuppressionFilter.lowerThreshold = newValue;
}
- (CGFloat)lowerThreshold;
{
return nonMaximumSuppressionFilter.lowerThreshold;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 765
|
```objective-c
#import "GPUImageColorInvertFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES fragment shader: outputs (1.0 - RGB) per pixel, leaving alpha untouched.
// (Comments must stay outside SHADER_STRING — its argument is stringified.)
NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
}
);
#else
// Desktop OpenGL variant: identical inversion, without the ES precision qualifiers.
NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
}
);
#endif
@implementation GPUImageColorInvertFilter

// Sets up the filter with the color-inversion fragment shader; there is no
// tunable state beyond what the base class manages.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageInvertFragmentShaderString];
    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 237
|
```objective-c
#import "GPUImagePixellateFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float fractionalWidthOfPixel;
uniform highp float aspectRatio;
void main()
{
highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
gl_FragColor = texture2D(inputImageTexture, samplePos );
}
);
#else
NSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float fractionalWidthOfPixel;
uniform float aspectRatio;
void main()
{
vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
gl_FragColor = texture2D(inputImageTexture, samplePos );
}
);
#endif
// Private interface: aspectRatio is maintained internally from the input texture
// size (and rotation) so that the pixellation cells stay square.
@interface GPUImagePixellateFilter ()
@property (readwrite, nonatomic) CGFloat aspectRatio;
- (void)adjustAspectRatio;
@end
@implementation GPUImagePixellateFilter

@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel;
@synthesize aspectRatio = _aspectRatio;

#pragma mark -
#pragma mark Initialization and teardown

// Convenience initializer: uses the stock pixellation shader.
- (id)init;
{
    return [self initWithFragmentShaderFromString:kGPUImagePixellationFragmentShaderString];
}

// Shared initializer, also used by subclasses that supply their own shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    self = [super initWithFragmentShaderFromString:fragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@"fractionalWidthOfPixel"];
    aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
    self.fractionalWidthOfAPixel = 0.05;

    return self;
}

// Recomputes the aspect-ratio uniform from the input size, accounting for
// rotations that swap width and height.
- (void)adjustAspectRatio;
{
    BOOL dimensionsAreSwapped = GPUImageRotationSwapsWidthAndHeight(inputRotation);
    CGFloat ratio = dimensionsAreSwapped
        ? (inputTextureSize.width / inputTextureSize.height)
        : (inputTextureSize.height / inputTextureSize.width);
    [self setAspectRatio:ratio];
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self adjustAspectRatio];
}

- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    [super forceProcessingAtSize:frameSize];
    [self adjustAspectRatio];
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    CGSize previousSize = inputTextureSize;
    [super setInputSize:newSize atIndex:textureIndex];

    // Only recompute the aspect ratio when the effective size actually changed
    // and the incoming size is meaningful.
    if (!CGSizeEqualToSize(previousSize, inputTextureSize) && !CGSizeEqualToSize(newSize, CGSizeZero))
    {
        [self adjustAspectRatio];
    }
}

#pragma mark -
#pragma mark Accessors

// Clamps the cell size to at least one texel so the sampling grid never
// collapses; before the first input arrives a 2048-wide texture is assumed.
- (void)setFractionalWidthOfAPixel:(CGFloat)newValue;
{
    CGFloat minimumSpacing = (inputTextureSize.width != 0.0) ? (1.0 / inputTextureSize.width) : (1.0 / 2048.0);
    _fractionalWidthOfAPixel = (newValue < minimumSpacing) ? minimumSpacing : newValue;
    [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram];
}

- (void)setAspectRatio:(CGFloat)newValue;
{
    _aspectRatio = newValue;
    [self setFloat:newValue forUniform:aspectRatioUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 897
|
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageErosionFilter;
@class GPUImageDilationFilter;
// A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius.
// This helps to filter out smaller bright elements.
@interface GPUImageOpeningFilter : GPUImageFilterGroup
{
GPUImageErosionFilter *erosionFilter; // stage 1: erodes the red channel
GPUImageDilationFilter *dilationFilter; // stage 2: dilation of the same radius
}
// Texel spacing between morphology samples — presumably forwarded to both
// stages; verify in the implementation.
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
// Creates an opening filter whose erosion and dilation stages share the given radius.
- (id)initWithRadius:(NSUInteger)radius;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 125
|
```objective-c
#import "GPUImageKuwaharaRadius3Filter.h"
// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. "Anisotropic Kuwahara Filtering on the GPU," GPU Pro p.247 (2010).
//
// Original header:
//
// Anisotropic Kuwahara Filtering on the GPU
// by Jan Eric Kyprianidis <www.kyprianidis.com>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
precision highp float;
const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
void main (void)
{
vec2 uv = textureCoordinate;
float n = float(16); // radius is assumed to be 3
vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
vec3 c;
vec3 cSq;
c = texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,1) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,2) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb;
cSq = c * c;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,2) * src_size).rgb;
cSq = c * c;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb;
cSq = c * c;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb;
cSq = c * c;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb;
cSq = c * c;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb;
cSq = c * c;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb;
m3 += c;
s3 += c * c;
float min_sigma2 = 1e+2;
m0 /= n;
s0 = abs(s0 / n - m0 * m0);
float sigma2 = s0.r + s0.g + s0.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m0, 1.0);
}
m1 /= n;
s1 = abs(s1 / n - m1 * m1);
sigma2 = s1.r + s1.g + s1.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m1, 1.0);
}
m2 /= n;
s2 = abs(s2 / n - m2 * m2);
sigma2 = s2.r + s2.g + s2.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m2, 1.0);
}
m3 /= n;
s3 = abs(s3 / n - m3 * m3);
sigma2 = s3.r + s3.g + s3.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m3, 1.0);
}
}
);
#else
NSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING
(
// Kuwahara edge-preserving smoothing with a fixed radius of 3 (desktop GL
// variant, no precision qualifiers). Averages four overlapping 4x4 windows
// around the fragment and outputs the mean of the window with the lowest
// summed colour variance.
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
// Texel size; hard-coded for a 768x1024 input. NOTE(review): not driven by a
// uniform, so other input sizes sample at the wrong offsets — confirm intent.
const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
void main (void)
{
vec2 uv = textureCoordinate;
float n = float(16); // radius is assumed to be 3
// m0..m3 accumulate per-window colour sums; s0..s3 accumulate sums of squares.
vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
vec3 c;
vec3 cSq;
// Window 0: offsets with x <= 0 and y <= 0. Samples on the shared axes also
// feed the adjacent windows (hence the extra m1/m3 accumulations).
c = texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb;
m0 += c;
s0 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m3 += c;
s3 += cSq;
// The centre texel is shared by all four windows.
c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb;
cSq = c * c;
m0 += c;
s0 += cSq;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
// Window 1: x <= 0, y >= 0.
c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-3,1) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,2) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb;
m1 += c;
s1 += c * c;
c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb;
cSq = c * c;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,2) * src_size).rgb;
cSq = c * c;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb;
cSq = c * c;
m1 += c;
s1 += cSq;
m2 += c;
s2 += cSq;
// Window 2: x >= 0, y >= 0.
c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb;
cSq = c * c;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb;
cSq = c * c;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb;
m2 += c;
s2 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb;
cSq = c * c;
m2 += c;
s2 += cSq;
m3 += c;
s3 += cSq;
// Window 3: x >= 0, y <= 0.
c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb;
m3 += c;
s3 += c * c;
c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb;
m3 += c;
s3 += c * c;
// Per-window variance = E[c^2] - E[c]^2 per channel (abs guards against
// tiny negative values from rounding). Keep the window with the smallest
// summed variance and emit its mean colour.
float min_sigma2 = 1e+2;
m0 /= n;
s0 = abs(s0 / n - m0 * m0);
float sigma2 = s0.r + s0.g + s0.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m0, 1.0);
}
m1 /= n;
s1 = abs(s1 / n - m1 * m1);
sigma2 = s1.r + s1.g + s1.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m1, 1.0);
}
m2 /= n;
s2 = abs(s2 / n - m2 * m2);
sigma2 = s2.r + s2.g + s2.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m2, 1.0);
}
m3 /= n;
s3 = abs(s3 / n - m3 * m3);
sigma2 = s3.r + s3.g + s3.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m3, 1.0);
}
}
);
#endif
@implementation GPUImageKuwaharaRadius3Filter

#pragma mark -
#pragma mark Initialization and teardown

/// Builds the filter around the fixed radius-3 Kuwahara fragment shader.
- (id)init;
{
    if ((self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaRadius3FragmentShaderString]))
    {
        // Nothing further to configure; the shader exposes no tunable uniforms.
    }
    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 5,639
|
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageRGBErosionFilter;
@class GPUImageRGBDilationFilter;
// A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius.
// This helps to filter out smaller dark elements.
@interface GPUImageRGBClosingFilter : GPUImageFilterGroup
{
// Second stage: per-channel erosion that shrinks the dilated regions back.
GPUImageRBErosionFilter *erosionFilter;
// First stage: per-channel dilation that fills in small dark elements.
GPUImageRGBDilationFilter *dilationFilter;
}

/// Designated initializer.
/// @param radius Radius, in texels, shared by the dilation and erosion passes.
- (id)initWithRadius:(NSUInteger)radius;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 114
|
```objective-c
#import "GPUImageClosingFilter.h"
#import "GPUImageErosionFilter.h"
#import "GPUImageDilationFilter.h"
@implementation GPUImageClosingFilter

@synthesize verticalTexelSpacing = _verticalTexelSpacing;
@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;

/// Convenience initializer: a morphological closing with a radius of 1.
- (id)init;
{
    return [self initWithRadius:1];
}

/// Designated initializer. Closing = dilation followed by erosion with the
/// same structuring-element radius, which removes small dark features.
- (id)initWithRadius:(NSUInteger)radius;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Stage one: dilation.
    dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
    [self addFilter:dilationFilter];

    // Stage two: erosion, fed by the dilation's output.
    erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
    [self addFilter:erosionFilter];

    [dilationFilter addTarget:erosionFilter];

    self.initialFilters = @[dilationFilter];
    self.terminalFilter = erosionFilter;

    return self;
}

// Texel-spacing setters fan the value out to both internal passes.

- (void)setVerticalTexelSpacing:(CGFloat)newValue;
{
    _verticalTexelSpacing = newValue;
    dilationFilter.verticalTexelSpacing = newValue;
    erosionFilter.verticalTexelSpacing = newValue;
}

- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
{
    _horizontalTexelSpacing = newValue;
    dilationFilter.horizontalTexelSpacing = newValue;
    erosionFilter.horizontalTexelSpacing = newValue;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageClosingFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 321
|
```objective-c
#import "GPUImageFilter.h"
@interface GPUImageExposureFilter : GPUImageFilter
{
// Handle for the "exposure" uniform in the fragment shader.
GLint exposureUniform;
}

// Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat exposure;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageExposureFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 62
|
```objective-c
#import "GPUImageTwoInputFilter.h"
// Two-input blend filter; the per-channel blend math is defined by the
// fragment shader in the corresponding implementation file.
@interface GPUImageAddBlendFilter : GPUImageTwoInputFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 25
|
```objective-c
#import "GPUImageWhiteBalanceFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING
(
// White balance: tint shifts the Q channel in YIQ space, then temperature
// blends toward an overlay of the image with a fixed warm colour.
uniform sampler2D inputImageTexture;
varying highp vec2 textureCoordinate;
uniform lowp float temperature;
uniform lowp float tint;
const lowp vec3 warmFilter = vec3(0.93, 0.54, 0.0);
const mediump mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);
const mediump mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);
void main()
{
lowp vec4 source = texture2D(inputImageTexture, textureCoordinate);
mediump vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint
// yiq.b is the Q (chroma) component; clamp keeps it in its valid range.
yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);
lowp vec3 rgb = YIQtoRGB * yiq;
// Overlay-style blend of each channel against warmFilter.
lowp vec3 processed = vec3(
(rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature
(rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))),
(rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));
gl_FragColor = vec4(mix(rgb, processed, temperature), source.a);
}
);
#else
// Desktop variant: identical math without ES precision qualifiers.
NSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying vec2 textureCoordinate;
uniform float temperature;
uniform float tint;
const vec3 warmFilter = vec3(0.93, 0.54, 0.0);
const mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);
const mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);
void main()
{
vec4 source = texture2D(inputImageTexture, textureCoordinate);
vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint
yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);
vec3 rgb = YIQtoRGB * yiq;
vec3 processed = vec3(
(rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature
(rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))),
(rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));
gl_FragColor = vec4(mix(rgb, processed, temperature), source.a);
}
);
#endif
@implementation GPUImageWhiteBalanceFilter

@synthesize temperature = _temperature;
@synthesize tint = _tint;

#pragma mark -
#pragma mark Initialization and teardown

/// Configures the white-balance program; 5000 is the neutral temperature and
/// 0.0 the neutral tint.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageWhiteBalanceFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    temperatureUniform = [filterProgram uniformIndex:@"temperature"];
    tintUniform = [filterProgram uniformIndex:@"tint"];

    self.temperature = 5000.0;
    self.tint = 0.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Maps the Kelvin-style temperature onto the small signed blend factor the
/// shader expects; values below 5000 swing faster than values above it.
- (void)setTemperature:(CGFloat)newValue;
{
    _temperature = newValue;

    CGFloat delta = _temperature - 5000.0;
    CGFloat scale = (_temperature < 5000) ? 0.0004 : 0.00006;
    [self setFloat:scale * delta forUniform:temperatureUniform program:filterProgram];
}

/// Tint is divided by 100 before being handed to the shader.
- (void)setTint:(CGFloat)newValue;
{
    _tint = newValue;
    [self setFloat:_tint / 100.0 forUniform:tintUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,172
|
```objective-c
#import "GPUImageGaussianBlurFilter.h"
// This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image)
@interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 51
|
```objective-c
#import "GPUImagePoissonBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
(
// One relaxation step of a gradient-domain (Poisson) blend: the centre pixel
// becomes its 4-neighbour mean from image 1 plus the centre-minus-mean
// difference (the gradient) taken from image 2, mixed in by image 2's alpha
// scaled by mixturePercent. The host filter iterates this shader repeatedly.
precision mediump float;

varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 bottomTextureCoordinate;

varying vec2 textureCoordinate2;
varying vec2 leftTextureCoordinate2;
varying vec2 rightTextureCoordinate2;
varying vec2 topTextureCoordinate2;
varying vec2 bottomTextureCoordinate2;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;

uniform lowp float mixturePercent;

void main()
{
vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;
vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
// NOTE(review): diffColor is computed but never used below.
vec3 diffColor = centerColor.rgb - meanColor;
vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
vec3 diffColor2 = centerColor2.rgb - meanColor2;
vec3 gradColor = (meanColor + diffColor2);
gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
}
);
#else
// Desktop variant: identical math without ES precision qualifiers.
NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 bottomTextureCoordinate;

varying vec2 textureCoordinate2;
varying vec2 leftTextureCoordinate2;
varying vec2 rightTextureCoordinate2;
varying vec2 topTextureCoordinate2;
varying vec2 bottomTextureCoordinate2;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;

uniform float mixturePercent;

void main()
{
vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;
vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
// NOTE(review): diffColor is computed but never used below.
vec3 diffColor = centerColor.rgb - meanColor;
vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
vec3 diffColor2 = centerColor2.rgb - meanColor2;
vec3 gradColor = (meanColor + diffColor2);
gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
}
);
#endif
@implementation GPUImagePoissonBlendFilter

@synthesize mix = _mix;
@synthesize numIterations = _numIterations;

// Sets up the blend program with defaults of a 50% mix and 10 relaxation passes.
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImagePoissonBlendFragmentShaderString]))
{
return nil;
}

mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
self.mix = 0.5;
self.numIterations = 10;

return self;
}

// Forwards the mix factor to the shader's mixturePercent uniform.
- (void)setMix:(CGFloat)newValue;
{
_mix = newValue;
[self setFloat:_mix forUniform:mixUniform program:filterProgram];
}

//- (void)setOutputFBO;
//{
// if (self.numIterations % 2 == 1) {
// [self setSecondFilterFBO];
// } else {
// [self setFilterFBO];
// }
//}

// Runs numIterations passes of the relaxation shader, ping-ponging between the
// primary output framebuffer (even passes, rendered via super) and a second
// framebuffer fetched from the shared cache (odd passes, rendered inline).
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
// Run the first stage of the two-pass filter
[GPUImageContext setActiveShaderProgram:filterProgram];
[super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
for (int pass = 1; pass < self.numIterations; pass++) {
if (pass % 2 == 0) {
[GPUImageContext setActiveShaderProgram:filterProgram];
// TODO: This will over-unlock the incoming framebuffer
[super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];
} else {
// Run the second stage of the two-pass filter
secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[secondOutputFramebuffer activateFramebuffer];
[GPUImageContext setActiveShaderProgram:filterProgram];
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
glClear(GL_COLOR_BUFFER_BIT);
// Texture unit 2: previous pass's output becomes this pass's first input.
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
glUniform1i(filterInputTextureUniform, 2);
// Texture unit 3: the second input image stays fixed across passes.
glActiveTexture(GL_TEXTURE3);
glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform2, 3);
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
}
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,554
|
```objective-c
#import "GPUImageLaplacianFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING
(
// Generic 3x3 convolution; the kernel arrives in the convolutionMatrix
// uniform (set by the host filter) and the result is biased by +0.5 so
// negative responses survive the 0..1 colour range.
precision highp float;

uniform sampler2D inputImageTexture;
uniform mediump mat3 convolutionMatrix;

varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;

varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;

varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;

void main()
{
mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;

// Row-by-row weighted sum of the 3x3 neighbourhood.
mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];

// Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace
resultColor = resultColor + 0.5;

gl_FragColor = vec4(resultColor, centerColor.a);
}
);
#else
// Desktop variant: identical logic without ES precision qualifiers.
NSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
uniform mat3 convolutionMatrix;

varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;

varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;

varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;

void main()
{
vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;

vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];

// Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace
resultColor = resultColor + 0.5;

gl_FragColor = vec4(resultColor, centerColor.a);
}
);
#endif
@implementation GPUImageLaplacianFilter

/// Installs the convolution shader and loads the fixed Laplacian kernel:
/// corners 0.5, edges 1.0, centre -6.0 (the weights sum to zero, so flat
/// regions map to the +0.5 bias applied inside the shader).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageLaplacianFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    GPUMatrix3x3 laplacianKernel;
    // Top row.
    laplacianKernel.one.one = 0.5;
    laplacianKernel.one.two = 1.0;
    laplacianKernel.one.three = 0.5;
    // Middle row.
    laplacianKernel.two.one = 1.0;
    laplacianKernel.two.two = -6.0;
    laplacianKernel.two.three = 1.0;
    // Bottom row.
    laplacianKernel.three.one = 0.5;
    laplacianKernel.three.two = 1.0;
    laplacianKernel.three.three = 0.5;

    self.convolutionKernel = laplacianKernel;

    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,158
|
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageGaussianBlurFilter;
@class GPUImageToonFilter;
/** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
*/
@interface GPUImageSmoothToonFilter : GPUImageFilterGroup
{
// First stage: Gaussian blur that suppresses noise before edge detection.
GPUImageGaussianBlurFilter *blurFilter;
// Second stage: the toon (edge + posterize) effect applied to the blurred image.
GPUImageToonFilter *toonFilter;
}

/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelWidth;
/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelHeight;

/// The radius of the underlying Gaussian blur. The default is 2.0.
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/// The threshold at which to apply the edges, default of 0.2
@property(readwrite, nonatomic) CGFloat threshold;

/// The levels of quantization for the posterization of colors within the scene, with a default of 10.0
@property(readwrite, nonatomic) CGFloat quantizationLevels;

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 263
|
```objective-c
#import "GPUImageCrosshatchFilter.h"
// Shader code based on path_to_url
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING
(
// Cross-hatch sketch effect: each successively darker luminance band adds
// another family of periodic diagonal black lines over a white background.
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float crossHatchSpacing;
uniform highp float lineWidth;
// Rec. 709 luma weights.
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
highp float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W);
lowp vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);
// mod() against the spacing selects stripes of width lineWidth along each
// diagonal direction (x+y and x-y, plus half-spacing-offset copies).
if (luminance < 1.00)
{
if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
if (luminance < 0.75)
{
if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
if (luminance < 0.50)
{
if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
if (luminance < 0.3)
{
if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
gl_FragColor = colorToDisplay;
}
);
#else
// Desktop variant: identical logic without ES precision qualifiers.
NSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float crossHatchSpacing;
uniform float lineWidth;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W);
vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);
if (luminance < 1.00)
{
if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
if (luminance < 0.75)
{
if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
if (luminance < 0.50)
{
if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
if (luminance < 0.3)
{
if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
{
colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
}
}
gl_FragColor = colorToDisplay;
}
);
#endif
@implementation GPUImageCrosshatchFilter

@synthesize crossHatchSpacing = _crossHatchSpacing;
@synthesize lineWidth = _lineWidth;

#pragma mark -
#pragma mark Initialization and teardown

/// Configures the cross-hatch program with its default spacing and line width.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageCrosshatchFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    crossHatchSpacingUniform = [filterProgram uniformIndex:@"crossHatchSpacing"];
    lineWidthUniform = [filterProgram uniformIndex:@"lineWidth"];

    self.crossHatchSpacing = 0.03;
    self.lineWidth = 0.003;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Clamps the hatch spacing so it never drops below the width of one texel,
/// falling back to a 2048-wide estimate before the input size is known.
- (void)setCrossHatchSpacing:(CGFloat)newValue;
{
    CGFloat singlePixelSpacing = (inputTextureSize.width != 0.0) ? (1.0 / inputTextureSize.width) : (1.0 / 2048.0);

    _crossHatchSpacing = (newValue < singlePixelSpacing) ? singlePixelSpacing : newValue;

    [self setFloat:_crossHatchSpacing forUniform:crossHatchSpacingUniform program:filterProgram];
}

- (void)setLineWidth:(CGFloat)newValue;
{
    _lineWidth = newValue;
    [self setFloat:_lineWidth forUniform:lineWidthUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,232
|
```objective-c
#import "GPUImageRawDataOutput.h"
#import "GPUImageContext.h"
#import "GLProgram.h"
#import "GPUImageFilter.h"
#import "GPUImageMovieWriter.h"
@interface GPUImageRawDataOutput ()
{
// Framebuffers: the incoming frame, the re-rendered output, and a copy kept
// locked while a caller is reading raw bytes.
GPUImageFramebuffer *firstInputFramebuffer, *outputFramebuffer, *retainedFramebuffer;

BOOL hasReadFromTheCurrentFrame;

// Pass-through (or colour-swizzling) program used to render into the output FBO.
GLProgram *dataProgram;
GLint dataPositionAttribute, dataTextureCoordinateAttribute;
GLint dataInputTextureUniform;

// CPU-side pixel buffer backing rawBytesForImage; freed in -dealloc only on
// the non-fast-texture-upload path.
GLubyte *_rawBytesForImage;

// When YES, the next rendered framebuffer is retained and read-locked for CPU readback.
BOOL lockNextFramebuffer;
}

// Frame rendering
- (void)renderAtInternalSize;

@end
@implementation GPUImageRawDataOutput
@synthesize rawBytesForImage = _rawBytesForImage;
@synthesize newFrameAvailableBlock = _newFrameAvailableBlock;
@synthesize enabled;
#pragma mark -
#pragma mark Initialization and teardown
// Configures the program used to render incoming frames into a readable
// framebuffer of the requested size.
// @param newImageSize Size, in pixels, of the raw-byte output image.
// @param resultsInBGRAFormat Whether callers want bytes in BGRA order.
- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
{
if (!(self = [super init]))
{
return nil;
}

self.enabled = YES;
lockNextFramebuffer = NO;
outputBGRA = resultsInBGRAFormat;
imageSize = newImageSize;
hasReadFromTheCurrentFrame = NO;
_rawBytesForImage = NULL;
inputRotation = kGPUImageNoRotation;

[GPUImageContext useImageProcessingContext];
// Use the channel-swizzling shader only when the requested byte order differs
// from what the active texture path produces. NOTE(review): presumably fast
// texture upload yields BGRA natively while the slow path yields RGBA — confirm.
if ( (outputBGRA && ![GPUImageContext supportsFastTextureUpload]) || (!outputBGRA && [GPUImageContext supportsFastTextureUpload]) )
{
dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
}
else
{
dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
}

if (!dataProgram.initialized)
{
[dataProgram addAttribute:@"position"];
[dataProgram addAttribute:@"inputTextureCoordinate"];

if (![dataProgram link])
{
// Dump all shader logs before asserting so link failures are diagnosable.
NSString *progLog = [dataProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [dataProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [dataProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
dataProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}

dataPositionAttribute = [dataProgram attributeIndex:@"position"];
dataTextureCoordinateAttribute = [dataProgram attributeIndex:@"inputTextureCoordinate"];
dataInputTextureUniform = [dataProgram uniformIndex:@"inputImageTexture"];

return self;
}
// Frees the CPU-side pixel buffer, but only on the non-fast-texture-upload
// path; on the fast path the pointer references memory this class does not
// own. NOTE(review): the allocation site is not visible in this chunk.
- (void)dealloc
{
if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload]))
{
free(_rawBytesForImage);
_rawBytesForImage = NULL;
}
}
#pragma mark -
#pragma mark Data access
// Renders the current input framebuffer into an output framebuffer of
// imageSize so its pixels can be read back. The caller is expected to have
// made the image-processing context current first.
- (void)renderAtInternalSize;
{
[GPUImageContext setActiveShaderProgram:dataProgram];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:imageSize onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
// If a read lock was requested via -lockFramebufferForReading, retain this
// framebuffer and map it for CPU access until -unlockFramebufferAfterReading.
if(lockNextFramebuffer)
{
retainedFramebuffer = outputFramebuffer;
[retainedFramebuffer lock];
[retainedFramebuffer lockForReading];
lockNextFramebuffer = NO;
}
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Full-screen quad in normalized device coordinates, drawn as a
// two-triangle strip.
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat textureCoordinates[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
// Bind the input texture to unit 4 and point the sampler uniform at it.
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
glUniform1i(dataInputTextureUniform, 4);
glVertexAttribPointer(dataPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
glVertexAttribPointer(dataTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glEnableVertexAttribArray(dataPositionAttribute);
glEnableVertexAttribArray(dataTextureCoordinateAttribute);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
// Balance the lock taken in -setInputFramebuffer:atIndex:.
[firstInputFramebuffer unlock];
}
// Returns the color at the given image location, triggering a frame readback
// if one hasn't happened yet (via the rawBytesForImage accessor). The Y axis
// is flipped because the rendered buffer is stored bottom-up, and the result
// is always returned in RGBA order regardless of the internal byte order.
- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
{
    GPUByteColorVector *pixelVectors = (GPUByteColorVector *)self.rawBytesForImage;

    // Clamp the (Y-flipped) sample point to the valid pixel range.
    CGPoint samplePoint = CGPointZero;
    samplePoint.x = MIN(MAX(locationInImage.x, 0.0), (imageSize.width - 1.0));
    samplePoint.y = MIN(MAX((imageSize.height - locationInImage.y), 0.0), (imageSize.height - 1.0));

    int pixelIndex = (int)(round((samplePoint.y * imageSize.width) + samplePoint.x));
    GPUByteColorVector sampledColor = pixelVectors[pixelIndex];

    if (outputBGRA)
    {
        // Internal storage is BGRA; swap red and blue so callers see RGBA.
        GLubyte swappedChannel = sampledColor.red;
        sampledColor.red = sampledColor.blue;
        sampledColor.blue = swappedChannel;
    }

    return sampledColor;
}
#pragma mark -
#pragma mark GPUImageInput protocol
// GPUImageInput: a new frame has been rendered upstream. Invalidate the cached
// readback so the next byte access re-renders, then notify the client's
// callback block, if one is set.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
hasReadFromTheCurrentFrame = NO;
if (_newFrameAvailableBlock != NULL)
{
_newFrameAvailableBlock();
}
}
// This output consumes a single input; always report texture slot 0.
- (NSInteger)nextAvailableTextureIndex;
{
return 0;
}
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
firstInputFramebuffer = newInputFramebuffer;
// Hold the framebuffer until it has been consumed in -renderAtInternalSize.
[firstInputFramebuffer lock];
}
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
inputRotation = newInputRotation;
}
// Input size is ignored; output is always rendered at the fixed imageSize.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}
- (CGSize)maximumOutputSize;
{
return imageSize;
}
// No teardown is needed when the source stops producing frames.
- (void)endProcessing;
{
}
- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
return NO;
}
// This target wants full-color input; monochrome optimization is declined.
- (BOOL)wantsMonochromeInput;
{
return NO;
}
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}
#pragma mark -
#pragma mark Accessors
// Lazily renders the current frame (if it hasn't been read yet) and returns a
// pointer to its pixel bytes (4 bytes per pixel). Blocks on the
// video-processing queue until readback completes. The returned buffer is
// owned by this object (or by the framebuffer on the fast path); do not free.
- (GLubyte *)rawBytesForImage;
{
if ( (_rawBytesForImage == NULL) && (![GPUImageContext supportsFastTextureUpload]) )
{
// Slow path: allocate a CPU-side buffer for glReadPixels, 4 bytes/pixel.
_rawBytesForImage = (GLubyte *) calloc(imageSize.width * imageSize.height * 4, sizeof(GLubyte));
hasReadFromTheCurrentFrame = NO;
}
if (hasReadFromTheCurrentFrame)
{
// The current frame was already read back; reuse the cached bytes.
return _rawBytesForImage;
}
else
{
runSynchronouslyOnVideoProcessingQueue(^{
// Note: the fast texture caches speed up 640x480 frame reads from 9.6 ms to 3.1 ms on iPhone 4S
[GPUImageContext useImageProcessingContext];
[self renderAtInternalSize];
if ([GPUImageContext supportsFastTextureUpload])
{
// Fast path: wait for the GPU, then borrow the texture cache's backing store.
glFinish();
_rawBytesForImage = [outputFramebuffer byteBuffer];
}
else
{
glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, _rawBytesForImage);
// GL_EXT_read_format_bgra
// glReadPixels(0, 0, imageSize.width, imageSize.height, GL_BGRA_EXT, GL_UNSIGNED_BYTE, _rawBytesForImage);
}
hasReadFromTheCurrentFrame = YES;
});
return _rawBytesForImage;
}
}
// Stride of the locked framebuffer's backing store, in bytes. Only meaningful
// between -lockFramebufferForReading and -unlockFramebufferAfterReading;
// otherwise retainedFramebuffer is nil and this returns 0.
- (NSUInteger)bytesPerRowInOutput;
{
return [retainedFramebuffer bytesPerRow];
}
// Changes the output dimensions, discarding any CPU-side buffer sized for the
// previous dimensions; it is reallocated lazily on the next byte access.
- (void)setImageSize:(CGSize)newImageSize {
imageSize = newImageSize;
if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload]))
{
free(_rawBytesForImage);
_rawBytesForImage = NULL;
}
}
// Requests that the next rendered framebuffer be retained and mapped for CPU
// reading; the actual lock is taken inside -renderAtInternalSize.
- (void)lockFramebufferForReading;
{
lockNextFramebuffer = YES;
}
// Releases the framebuffer retained by the preceding read lock, balancing
// both the read mapping and the reference-count lock.
- (void)unlockFramebufferAfterReading;
{
[retainedFramebuffer unlockAfterReading];
[retainedFramebuffer unlock];
retainedFramebuffer = nil;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 2,184
|
```objective-c
#import "GPUImageTwoPassFilter.h"
// Sobel edge detection, implemented as a two-pass filter. Outputs an
// edge-strength image.
@interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter
{
GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform;
// YES once the client has explicitly set texelWidth/texelHeight, so automatic
// size-derived values should no longer overwrite them.
BOOL hasOverriddenImageSizeFactor;
}
// The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;
// The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0.
@property(readwrite, nonatomic) CGFloat edgeStrength;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 151
|
```objective-c
#import "GPUImageFilter.h"
// This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene.
//
// It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications:
// M. Dubsk, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
// M. Dubsk, J. Havel, and A. Herout. PClines Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494.
// Accumulator for a PClines (parallel-coordinate-space Hough transform) line
// detector; see the Graph@FIT publications cited in the header comment.
@interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter
{
// CPU-side scratch buffer for the input image's pixels.
GLubyte *rawImagePixels;
// Vertex data for the line pairs drawn into the accumulator.
GLfloat *lineCoordinates;
unsigned int maxLinePairsToRender, linePairsToRender;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 201
|
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
/** Runs a 3x3 convolution kernel against the image
*/
/** Runs a 3x3 convolution kernel against the image
 */
@interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
{
GLint convolutionMatrixUniform;
}
/** Convolution kernel to run against the image
 The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels.
 The matrix is specified in row-major order, with the top-left coefficient at position (1,1) and the bottom-right one at (3,3).
 If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.
 */
@property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 157
|
```objective-c
#import "GPUImageFilter.h"
// Rotates the hue of the image.
@interface GPUImageHueFilter : GPUImageFilter
{
GLint hueAdjustUniform;
}
// Hue rotation to apply; units (degrees vs. radians) are defined by the
// implementation's setter — check GPUImageHueFilter.m before relying on them.
@property (nonatomic, readwrite) CGFloat hue;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHueFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 41
|
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageGrayscaleFilter;
@class GPUImage3x3TextureSamplingFilter;
@class GPUImageNonMaximumSuppressionFilter;
/*
An implementation of the Features from Accelerated Segment Test (FAST) feature detector as described in the following publications:
E. Rosten and T. Drummond. Fusing points and lines for high performance tracking. IEEE International Conference on Computer Vision, 2005.
E. Rosten and T. Drummond. Machine learning for high-speed corner detection. European Conference on Computer Vision, 2006.
For more about the FAST feature detector, see the resources here:
path_to_url
*/
// Variants of the FAST detector: raw FAST-12 corner candidates, or the same
// candidates with non-maximum suppression applied to thin clustered responses.
// NS_ENUM (rather than a bare typedef enum) fixes the underlying type and
// enables switch-exhaustiveness warnings; existing constant names are kept.
typedef NS_ENUM(NSUInteger, GPUImageFASTDetectorType) {
    kGPUImageFAST12Contiguous,
    kGPUImageFAST12ContiguousNonMaximumSuppressed
};
@interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup
{
    GPUImageGrayscaleFilter *luminanceReductionFilter;
    GPUImage3x3TextureSamplingFilter *featureDetectionFilter;
    GPUImageNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
    // Generate a lookup texture based on the bit patterns
    // Step 1: convert to monochrome if necessary
    // Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components
    // Step 3: do non-maximum suppression of close corner points
}
// Designated initializer selecting which detector variant to build.
- (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 310
|
```objective-c
#import "GPUImagePrewittEdgeDetectionFilter.h"
@implementation GPUImagePrewittEdgeDetectionFilter

// Prewitt operator: same 3x3 neighborhood structure as Sobel but with unit
// weights (h = bottom row minus top row, v = right column minus left column).
// Neighbor coordinates are precomputed as varyings by the vertex shader; the
// output is the scaled gradient magnitude replicated into RGB.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING
(
 precision highp float;
 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;
 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform float edgeStrength;
 void main()
 {
 float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
 float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
 float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
 float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
 float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
 float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
 float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
 float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
 float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
 float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
 float mag = length(vec2(h, v)) * edgeStrength;
 gl_FragColor = vec4(vec3(mag), 1.0);
 }
);
#else
// Desktop GL variant: identical logic, minus the ES precision qualifier.
NSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 topLeftTextureCoordinate;
 varying vec2 topRightTextureCoordinate;
 varying vec2 bottomTextureCoordinate;
 varying vec2 bottomLeftTextureCoordinate;
 varying vec2 bottomRightTextureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform float edgeStrength;
 void main()
 {
 float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
 float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
 float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
 float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
 float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
 float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
 float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
 float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
 float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
 float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
 float mag = length(vec2(h, v)) * edgeStrength;
 gl_FragColor = vec4(vec3(mag), 1.0);
 }
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
// Delegates to the superclass's shader-string initializer (which builds both
// passes), then applies the default edge strength.
- (id)init;
{
if (!(self = [self initWithFragmentShaderFromString:kGPUImagePrewittFragmentShaderString]))
{
return nil;
}
self.edgeStrength = 1.0;
return self;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 793
|
```objective-c
#import "GPUImageFilter.h"
// Draws a set of lines (given as slope/intercept pairs) over the output, e.g.
// to visualize the results of a line-detection filter.
@interface GPUImageLineGenerator : GPUImageFilter
{
GLint lineWidthUniform, lineColorUniform;
// Vertex buffer for the line endpoints, rebuilt per render call.
GLfloat *lineCoordinates;
}
// The width of the displayed lines, in pixels. The default is 1.
@property(readwrite, nonatomic) CGFloat lineWidth;
// The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
// Rendering
- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLineGenerator.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 165
|
```objective-c
#import "GPUImageColorMatrixFilter.h"
// Transforms each pixel's RGBA vector by a 4x4 color matrix, then blends the
// result with the original color by `intensity` (1.0 = fully transformed).
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform lowp mat4 colorMatrix;
 uniform lowp float intensity;
 void main()
 {
 lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
 lowp vec4 outputColor = textureColor * colorMatrix;
 gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);
 }
);
#else
// Desktop GL variant: identical logic, minus the ES precision qualifiers.
NSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;
 uniform mat4 colorMatrix;
 uniform float intensity;
 void main()
 {
 vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
 vec4 outputColor = textureColor * colorMatrix;
 gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);
 }
);
#endif
@implementation GPUImageColorMatrixFilter
@synthesize intensity = _intensity;
#pragma mark -
#pragma mark Initialization and teardown
// Compiles the shader via super, caches uniform locations, then applies the
// defaults: full intensity and an identity matrix (no color change).
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorMatrixFragmentShaderString]))
{
return nil;
}
colorMatrixUniform = [filterProgram uniformIndex:@"colorMatrix"];
intensityUniform = [filterProgram uniformIndex:@"intensity"];
self.intensity = 1.f;
self.colorMatrix = (GPUMatrix4x4){
{1.f, 0.f, 0.f, 0.f},
{0.f, 1.f, 0.f, 0.f},
{0.f, 0.f, 1.f, 0.f},
{0.f, 0.f, 0.f, 1.f}
};
return self;
}
#pragma mark -
#pragma mark Accessors
// Each setter pushes its value straight to the shader uniform.
- (void)setIntensity:(CGFloat)newIntensity;
{
_intensity = newIntensity;
[self setFloat:_intensity forUniform:intensityUniform program:filterProgram];
}
- (void)setColorMatrix:(GPUMatrix4x4)newColorMatrix;
{
_colorMatrix = newColorMatrix;
[self setMatrix4f:_colorMatrix forUniform:colorMatrixUniform program:filterProgram];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 535
|
```objective-c
#import <Foundation/Foundation.h>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#else
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
#endif
#import <QuartzCore/QuartzCore.h>
#import <CoreMedia/CoreMedia.h>
// Texture parameters used when creating a framebuffer's backing texture
// (filtering, wrap modes, and pixel format/type).
typedef struct GPUTextureOptions {
GLenum minFilter;
GLenum magFilter;
GLenum wrapS;
GLenum wrapT;
GLenum internalFormat;
GLenum format;
GLenum type;
} GPUTextureOptions;
// A reference-counted wrapper around an OpenGL (ES) framebuffer and its
// texture, designed to be pooled and recycled by GPUImage's framebuffer cache.
@interface GPUImageFramebuffer : NSObject
@property(readonly) CGSize size;
@property(readonly) GPUTextureOptions textureOptions;
@property(readonly) GLuint texture;
// YES when only a texture was generated (no framebuffer object attached).
@property(readonly) BOOL missingFramebuffer;
// Initialization and teardown
- (id)initWithSize:(CGSize)framebufferSize;
- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
// Usage
- (void)activateFramebuffer;
// Reference counting: lock/unlock must be balanced; an unbalanced unlock may
// return the framebuffer to the cache while still in use.
- (void)lock;
- (void)unlock;
- (void)clearAllLocks;
- (void)disableReferenceCounting;
- (void)enableReferenceCounting;
// Image capture
- (CGImageRef)newCGImageFromFramebufferContents;
- (void)restoreRenderTarget;
// Raw data bytes: lockForReading/unlockAfterReading bracket CPU access to the
// texture's backing store; bytesPerRow gives its stride.
- (void)lockForReading;
- (void)unlockAfterReading;
- (NSUInteger)bytesPerRow;
- (GLubyte *)byteBuffer;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFramebuffer.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 350
|
```objective-c
#import "GPUImageUnsharpMaskFilter.h"
#import "GPUImageFilter.h"
#import "GPUImageTwoInputFilter.h"
#import "GPUImageGaussianBlurFilter.h"
// Unsharp mask second stage: blends the sharp original (texture 0) with its
// Gaussian-blurred copy (texture 1). intensity 0.0 = fully blurred,
// 1.0 = original image; values above 1.0 extrapolate away from the blur,
// which is what produces the sharpening effect.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform highp float intensity;
 void main()
 {
 lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
 lowp vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
 gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a);
 // gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity);
 // gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0);
 }
);
#else
// Desktop GL variant: identical logic, minus the ES precision qualifiers.
NSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 uniform float intensity;
 void main()
 {
 vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
 vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
 gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a);
 // gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity);
 // gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0);
 }
);
#endif
@implementation GPUImageUnsharpMaskFilter
@synthesize blurRadiusInPixels;
@synthesize intensity = _intensity;
// Builds the two-stage group: a Gaussian blur feeding a two-input blend.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
// First pass: apply a variable Gaussian blur
blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
[self addFilter:blurFilter];
// Second pass: combine the blurred image with the original sharp one
unsharpMaskFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageUnsharpMaskFragmentShaderString];
[self addFilter:unsharpMaskFilter];
// Texture location 0 needs to be the sharp image for both the blur and the second stage processing
[blurFilter addTarget:unsharpMaskFilter atTextureLocation:1];
self.initialFilters = [NSArray arrayWithObjects:blurFilter, unsharpMaskFilter, nil];
self.terminalFilter = unsharpMaskFilter;
self.intensity = 1.0;
self.blurRadiusInPixels = 4.0;
return self;
}
#pragma mark -
#pragma mark Accessors
// blurRadiusInPixels is forwarded to the internal blur filter rather than
// stored locally.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
blurFilter.blurRadiusInPixels = newValue;
}
- (CGFloat)blurRadiusInPixels;
{
return blurFilter.blurRadiusInPixels;
}
- (void)setIntensity:(CGFloat)newValue;
{
_intensity = newValue;
[unsharpMaskFilter setFloat:newValue forUniformName:@"intensity"];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 751
|
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
// For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
// This extends out brighter colors, and can be used for abstraction of color images.
// Per-channel (RGB) morphological dilation; see the header comment above for
// the visual effect.
@interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter
// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)dilationRadius;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 122
|
```objective-c
#import "GPUImageFilter.h"
// Sharpens the image by boosting each pixel's contrast with its neighbors.
@interface GPUImageSharpenFilter : GPUImageFilter
{
GLint sharpnessUniform;
// Uniform locations for the inverse image dimensions used to size the
// sampling offsets.
GLint imageWidthFactorUniform, imageHeightFactorUniform;
}
// Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat sharpness;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 77
|
```objective-c
#import "GPUImageLookupFilter.h"
// Color lookup via a 512x512 LUT image laid out as an 8x8 grid of 64x64
// tiles, one tile per quantized blue level (blue scaled to 0..63). Red and
// green index within a tile (with half-texel insets to avoid bleeding across
// tile edges); the two tiles bracketing the blue value are sampled and mixed
// by its fractional part. `intensity` blends the result with the original.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2; // TODO: This is not used
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2; // lookup texture
 uniform lowp float intensity;
 void main()
 {
 highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
 highp float blueColor = textureColor.b * 63.0;
 highp vec2 quad1;
 quad1.y = floor(floor(blueColor) / 8.0);
 quad1.x = floor(blueColor) - (quad1.y * 8.0);
 highp vec2 quad2;
 quad2.y = floor(ceil(blueColor) / 8.0);
 quad2.x = ceil(blueColor) - (quad2.y * 8.0);
 highp vec2 texPos1;
 texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
 texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
 highp vec2 texPos2;
 texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
 texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
 lowp vec4 newColor1 = texture2D(inputImageTexture2, texPos1);
 lowp vec4 newColor2 = texture2D(inputImageTexture2, texPos2);
 lowp vec4 newColor = mix(newColor1, newColor2, fract(blueColor));
 gl_FragColor = mix(textureColor, vec4(newColor.rgb, textureColor.w), intensity);
 }
);
#else
// Desktop GL variant: identical logic, minus the ES precision qualifiers.
NSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2; // TODO: This is not used
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2; // lookup texture
 uniform float intensity;
 void main()
 {
 vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
 float blueColor = textureColor.b * 63.0;
 vec2 quad1;
 quad1.y = floor(floor(blueColor) / 8.0);
 quad1.x = floor(blueColor) - (quad1.y * 8.0);
 vec2 quad2;
 quad2.y = floor(ceil(blueColor) / 8.0);
 quad2.x = ceil(blueColor) - (quad2.y * 8.0);
 vec2 texPos1;
 texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
 texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
 vec2 texPos2;
 texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
 texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
 vec4 newColor1 = texture2D(inputImageTexture2, texPos1);
 vec4 newColor2 = texture2D(inputImageTexture2, texPos2);
 vec4 newColor = mix(newColor1, newColor2, fract(blueColor));
 gl_FragColor = mix(textureColor, vec4(newColor.rgb, textureColor.w), intensity);
 }
);
#endif
@implementation GPUImageLookupFilter

@synthesize intensity = _intensity;

#pragma mark -
#pragma mark Initialization and teardown

// Compiles the lookup shader via super, then caches the intensity uniform and
// applies its default.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLookupFragmentShaderString]))
    {
        return nil;
    }

    // BUG FIX: the uniform lookup and default-intensity assignment must run
    // AFTER the call to super, which is what creates filterProgram. The
    // previous ordering messaged nil, leaving intensityUniform at 0 and never
    // applying the default intensity to the actual shader uniform.
    intensityUniform = [filterProgram uniformIndex:@"intensity"];
    self.intensity = 1.0f;

    return self;
}

#pragma mark -
#pragma mark Accessors

// Blend amount between the original image (0.0) and the fully looked-up
// color (1.0); pushed straight to the shader uniform.
- (void)setIntensity:(CGFloat)intensity
{
    _intensity = intensity;
    [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLookupFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,121
|
```objective-c
#import "GPUImageDilationFilter.h"
@implementation GPUImageDilationFilter
// Vertex shaders for dilation radii 1-4. Each precomputes symmetric sample
// coordinates at +/- 1..radius texel offsets along one axis (the axis is
// chosen by which of texelWidthOffset/texelHeightOffset is nonzero), so the
// fragment shader can take the per-pixel maximum over that 1-D neighborhood.
NSString *const kGPUImageDilationRadiusOneVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec2 inputTextureCoordinate;
 uniform float texelWidthOffset;
 uniform float texelHeightOffset;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 void main()
 {
 gl_Position = position;
 vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
 centerTextureCoordinate = inputTextureCoordinate;
 oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
 oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
 }
);
// Radius 2: adds the +/- 2-texel sample pair.
NSString *const kGPUImageDilationRadiusTwoVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec2 inputTextureCoordinate;
 uniform float texelWidthOffset;
 uniform float texelHeightOffset;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 void main()
 {
 gl_Position = position;
 vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
 centerTextureCoordinate = inputTextureCoordinate;
 oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
 oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
 twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);
 twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);
 }
);
// Radius 3: adds the +/- 3-texel sample pair.
NSString *const kGPUImageDilationRadiusThreeVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec2 inputTextureCoordinate;
 uniform float texelWidthOffset;
 uniform float texelHeightOffset;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 varying vec2 threeStepsPositiveTextureCoordinate;
 varying vec2 threeStepsNegativeTextureCoordinate;
 void main()
 {
 gl_Position = position;
 vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
 centerTextureCoordinate = inputTextureCoordinate;
 oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
 oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
 twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);
 twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);
 threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0);
 threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0);
 }
);
// Radius 4: adds the +/- 4-texel sample pair.
NSString *const kGPUImageDilationRadiusFourVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec2 inputTextureCoordinate;
 uniform float texelWidthOffset;
 uniform float texelHeightOffset;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 varying vec2 threeStepsPositiveTextureCoordinate;
 varying vec2 threeStepsNegativeTextureCoordinate;
 varying vec2 fourStepsPositiveTextureCoordinate;
 varying vec2 fourStepsNegativeTextureCoordinate;
 void main()
 {
 gl_Position = position;
 vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
 centerTextureCoordinate = inputTextureCoordinate;
 oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
 oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
 twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);
 twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);
 threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0);
 threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0);
 fourStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 4.0);
 fourStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 4.0);
 }
);
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Fragment shaders for dilation radii 1-3 (iOS/ES variants): each outputs the
// maximum of the red channel over the samples precomputed by the matching
// vertex shader, replicated into a grayscale result.
NSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING
(
 precision lowp float;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
 float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
 float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
 float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
 lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
 maxValue = max(maxValue, oneStepNegativeIntensity);
 gl_FragColor = vec4(vec3(maxValue), 1.0);
 }
);
// Radius 2: maximum over five samples.
NSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING
(
 precision lowp float;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
 float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
 float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
 float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
 float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
 float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
 lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
 maxValue = max(maxValue, oneStepNegativeIntensity);
 maxValue = max(maxValue, twoStepsPositiveIntensity);
 maxValue = max(maxValue, twoStepsNegativeIntensity);
 gl_FragColor = vec4(vec3(maxValue), 1.0);
 }
);
// Radius 3: maximum over seven samples.
NSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING
(
 precision lowp float;
 varying vec2 centerTextureCoordinate;
 varying vec2 oneStepPositiveTextureCoordinate;
 varying vec2 oneStepNegativeTextureCoordinate;
 varying vec2 twoStepsPositiveTextureCoordinate;
 varying vec2 twoStepsNegativeTextureCoordinate;
 varying vec2 threeStepsPositiveTextureCoordinate;
 varying vec2 threeStepsNegativeTextureCoordinate;
 uniform sampler2D inputImageTexture;
 void main()
 {
 float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
 float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
 float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
 float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
 float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
 float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
 float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
 lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
 maxValue = max(maxValue, oneStepNegativeIntensity);
 maxValue = max(maxValue, twoStepsPositiveIntensity);
 maxValue = max(maxValue, twoStepsNegativeIntensity);
 maxValue = max(maxValue, threeStepsPositiveIntensity);
 maxValue = max(maxValue, threeStepsNegativeIntensity);
 gl_FragColor = vec4(vec3(maxValue), 1.0);
 }
);
NSString *const kGPUImageDilationRadiusFourFragmentShaderString = SHADER_STRING
(
precision lowp float;
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
varying vec2 fourStepsPositiveTextureCoordinate;
varying vec2 fourStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
maxValue = max(maxValue, threeStepsPositiveIntensity);
maxValue = max(maxValue, threeStepsNegativeIntensity);
maxValue = max(maxValue, fourStepsPositiveIntensity);
maxValue = max(maxValue, fourStepsNegativeIntensity);
gl_FragColor = vec4(vec3(maxValue), 1.0);
}
);
#else
// Desktop OpenGL (Mac) variants: identical logic to the ES shaders above, but
// without precision qualifiers, which desktop GLSL 1.20 does not support.
// Radius-1 grayscale dilation along one axis.
NSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
float maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
gl_FragColor = vec4(vec3(maxValue), 1.0);
}
);
// Radius-2 grayscale dilation along one axis.
NSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
float maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
gl_FragColor = vec4(vec3(maxValue), 1.0);
}
);
// Radius-3 grayscale dilation along one axis.
NSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
float maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
maxValue = max(maxValue, threeStepsPositiveIntensity);
maxValue = max(maxValue, threeStepsNegativeIntensity);
gl_FragColor = vec4(vec3(maxValue), 1.0);
}
);
// Radius-4 grayscale dilation along one axis.
NSString *const kGPUImageDilationRadiusFourFragmentShaderString = SHADER_STRING
(
varying vec2 centerTextureCoordinate;
varying vec2 oneStepPositiveTextureCoordinate;
varying vec2 oneStepNegativeTextureCoordinate;
varying vec2 twoStepsPositiveTextureCoordinate;
varying vec2 twoStepsNegativeTextureCoordinate;
varying vec2 threeStepsPositiveTextureCoordinate;
varying vec2 threeStepsNegativeTextureCoordinate;
varying vec2 fourStepsPositiveTextureCoordinate;
varying vec2 fourStepsNegativeTextureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
float maxValue = max(centerIntensity, oneStepPositiveIntensity);
maxValue = max(maxValue, oneStepNegativeIntensity);
maxValue = max(maxValue, twoStepsPositiveIntensity);
maxValue = max(maxValue, twoStepsNegativeIntensity);
maxValue = max(maxValue, threeStepsPositiveIntensity);
maxValue = max(maxValue, threeStepsNegativeIntensity);
maxValue = max(maxValue, fourStepsPositiveIntensity);
maxValue = max(maxValue, fourStepsNegativeIntensity);
gl_FragColor = vec4(vec3(maxValue), 1.0);
}
);
#endif
#pragma mark -
#pragma mark Initialization and teardown
/// Designated initializer. Selects the shader pair matching the requested
/// dilation radius and configures both filter passes with it. Radii outside
/// the supported 1-4 range are clamped: 0 behaves like 1, and anything larger
/// than 4 uses the radius-four shaders.
- (id)initWithRadius:(NSUInteger)dilationRadius;
{
    NSString *vertexShaderForThisRadius;
    NSString *fragmentShaderForThisRadius;

    if (dilationRadius <= 1)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageDilationRadiusOneFragmentShaderString;
    }
    else if (dilationRadius == 2)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageDilationRadiusTwoFragmentShaderString;
    }
    else if (dilationRadius == 3)
    {
        vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageDilationRadiusThreeFragmentShaderString;
    }
    else
    {
        // Radius 4, and the fallback for any larger value.
        vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
        fragmentShaderForThisRadius = kGPUImageDilationRadiusFourFragmentShaderString;
    }

    // Both passes (one per axis) reuse the same shader pair.
    self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius
                        firstStageFragmentShaderFromString:fragmentShaderForThisRadius
                         secondStageVertexShaderFromString:vertexShaderForThisRadius
                       secondStageFragmentShaderFromString:fragmentShaderForThisRadius];
    return self;
}
/// Convenience initializer: defaults to the smallest supported radius (1).
- (id)init;
{
    return [self initWithRadius:1];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDilationFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 3,719
|
```objective-c
#import "GPUImageSingleComponentGaussianBlurFilter.h"
@implementation GPUImageSingleComponentGaussianBlurFilter
/// Builds the vertex shader source for an optimized Gaussian blur of the given
/// radius/sigma. Pairs of adjacent Gaussian taps are merged into single
/// linearly-interpolated texture reads; the resulting offsets (up to 7, i.e.
/// 15 coordinates including the center) are precomputed here as varyings so
/// the fragment shader avoids dependent texture reads.
/// @param blurRadius Blur radius in pixels; below 1 returns the stock passthrough vertex shader.
/// @param sigma Standard deviation of the Gaussian distribution.
/// @return Newly generated shader source.
+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
if (blurRadius < 1)
{
return kGPUImageVertexShaderString;
}
// First, generate the normal Gaussian weights for a given sigma
GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
GLfloat sumOfWeights = 0.0;
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));
// Off-center taps apply on both sides of the kernel, so they count twice
// toward the normalization sum.
if (currentGaussianWeightIndex == 0)
{
sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
}
else
{
sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
}
}
// Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
}
// From these weights we calculate the offsets to read interpolated values from
// (capped at 7 merged offsets — the varying budget shared with the fragment shader).
NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));
for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
{
// Each merged offset is the weight-weighted average of two adjacent tap positions.
GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1];
GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2];
GLfloat optimizedWeight = firstWeight + secondWeight;
optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight;
}
NSMutableString *shaderString = [[NSMutableString alloc] init];
// Header
[shaderString appendFormat:@"\
attribute vec4 position;\n\
attribute vec4 inputTextureCoordinate;\n\
\n\
uniform float texelWidthOffset;\n\
uniform float texelHeightOffset;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
gl_Position = position;\n\
\n\
vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];
// Inner offset loop
[shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"];
for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
{
[shaderString appendFormat:@"\
blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n\
blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]];
}
// Footer
[shaderString appendString:@"}\n"];
free(optimizedGaussianOffsets);
free(standardGaussianWeights);
return shaderString;
}
/// Builds the fragment shader source for an optimized single-component (red
/// channel) Gaussian blur of the given radius/sigma. Up to 7 merged offsets
/// arrive via varyings from the matching vertex shader; if the radius needs
/// more samples, the remainder are computed with dependent texture reads here.
/// @param blurRadius Blur radius in pixels; below 1 returns the passthrough shader.
/// @param sigma Standard deviation of the Gaussian distribution.
/// @return Newly generated shader source.
+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
if (blurRadius < 1)
{
return kGPUImagePassthroughFragmentShaderString;
}
// First, generate the normal Gaussian weights for a given sigma
GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
GLfloat sumOfWeights = 0.0;
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));
// Off-center taps apply on both sides of the kernel, so they count twice
// toward the normalization sum.
if (currentGaussianWeightIndex == 0)
{
sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
}
else
{
sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
}
}
// Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
{
standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
}
// From these weights we calculate the offsets to read interpolated values from.
// Only up to 7 merged offsets fit in varyings; the "true" count may be larger.
NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);
NSMutableString *shaderString = [[NSMutableString alloc] init];
// Header
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
uniform highp float texelWidthOffset;\n\
uniform highp float texelHeightOffset;\n\
\n\
varying highp vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
lowp float sum = 0.0;\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
#else
// FIX: cast to unsigned long to match the %lu specifier, as the iOS branch
// above already does (NSUInteger is unsigned int on 32-bit platforms).
[shaderString appendFormat:@"\
uniform sampler2D inputImageTexture;\n\
uniform float texelWidthOffset;\n\
uniform float texelHeightOffset;\n\
\n\
varying vec2 blurCoordinates[%lu];\n\
\n\
void main()\n\
{\n\
float sum = 0.0;\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
#endif
// Inner texture loop
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]).r * %f;\n", standardGaussianWeights[0]];
for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)
{
GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1];
GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2];
GLfloat optimizedWeight = firstWeight + secondWeight;
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight];
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight];
}
// If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader
if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
#else
// FIX: desktop GLSL 1.20 has no precision qualifiers, so the previous
// "highp vec2" declaration failed to compile on the Mac path; use a plain
// vec2, consistent with the other Mac-branch shaders in this framework.
[shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
#endif
for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)
{
GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1];
GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2];
GLfloat optimizedWeight = firstWeight + secondWeight;
GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight;
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f).r * %f;\n", optimizedOffset, optimizedWeight];
[shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f).r * %f;\n", optimizedOffset, optimizedWeight];
}
}
// Footer
[shaderString appendString:@"\
gl_FragColor = vec4(sum, sum, sum, 1.0);\n\
}\n"];
free(standardGaussianWeights);
return shaderString;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 2,139
|
```objective-c
#import "GPUImage3x3ConvolutionFilter.h"
/// Emboss effect implemented as a 3x3 convolution; the kernel is presumably
/// derived from the intensity value in the implementation — see the .m.
@interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter
// The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat intensity;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 72
|
```objective-c
#import "GPUImageTwoInputFilter.h"
/// Two-input "color" blend mode filter; all behavior comes from the fragment
/// shader configured in the implementation file.
@interface GPUImageColorBlendFilter : GPUImageTwoInputFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 25
|
```objective-c
#import "GPUImageGaussianBlurFilter.h"
/// Bilateral (edge-preserving) blur built on top of the Gaussian blur filter.
@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
{
// NOTE(review): named like shader-uniform handles but typed CGFloat rather
// than GLint — confirm against the uniformIndex: usage in the .m.
CGFloat firstDistanceNormalizationFactorUniform;
CGFloat secondDistanceNormalizationFactorUniform;
}
// A normalization factor for the distance between central color and sample color.
@property(nonatomic, readwrite) CGFloat distanceNormalizationFactor;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 70
|
```objective-c
#import "GPUImageCrosshairGenerator.h"
// Vertex shader for point-sprite crosshairs: maps normalized [0,1] positions
// to clip space, sizes the point to the crosshair width, and precomputes the
// sprite-local center for the fragment shader.
NSString *const kGPUImageCrosshairVertexShaderString = SHADER_STRING
(
attribute vec4 position;
uniform float crosshairWidth;
varying vec2 centerLocation;
varying float pointSpacing;
void main()
{
gl_Position = vec4(((position.xy * 2.0) - 1.0), 0.0, 1.0);
gl_PointSize = crosshairWidth + 1.0;
pointSpacing = 1.0 / crosshairWidth;
centerLocation = vec2(pointSpacing * ceil(crosshairWidth / 2.0), pointSpacing * ceil(crosshairWidth / 2.0));
}
);
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// ES fragment shader: draws a '+' within the point sprite by lighting texels
// close to the horizontal or vertical axis through the center.
NSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING
(
uniform lowp vec3 crosshairColor;
varying highp vec2 centerLocation;
varying highp float pointSpacing;
void main()
{
lowp vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy);
lowp float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09);
gl_FragColor = vec4(crosshairColor * axisTest, axisTest);
// gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0);
}
);
#else
// Desktop variant: requires GLSL 1.20 for gl_PointCoord; no precision qualifiers.
NSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING
(
GPUImageEscapedHashIdentifier(version 120)\n
uniform vec3 crosshairColor;
varying vec2 centerLocation;
varying float pointSpacing;
void main()
{
vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy);
float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09);
gl_FragColor = vec4(crosshairColor * axisTest, axisTest);
// gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0);
}
);
#endif
@implementation GPUImageCrosshairGenerator

@synthesize crosshairWidth = _crosshairWidth;
#pragma mark -
#pragma mark Initialization and teardown
// Compiles the crosshair shaders and seeds the defaults (width 5, green color).
- (id)init;
{
if (!(self = [super initWithVertexShaderFromString:kGPUImageCrosshairVertexShaderString fragmentShaderFromString:kGPUImageCrosshairFragmentShaderString]))
{
return nil;
}
// Uniform lookups and default uniform values must run on the GL context queue.
runSynchronouslyOnVideoProcessingQueue(^{
crosshairWidthUniform = [filterProgram uniformIndex:@"crosshairWidth"];
crosshairColorUniform = [filterProgram uniformIndex:@"crosshairColor"];
self.crosshairWidth = 5.0;
[self setCrosshairColorRed:0.0 green:1.0 blue:0.0];
});
return self;
}
#pragma mark -
#pragma mark Rendering
// Draws one point sprite per crosshair onto a freshly cleared framebuffer and
// notifies downstream targets. crosshairCoordinates is a flat array of x,y
// pairs (2 floats per crosshair) in normalized [0,1] coordinates.
- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;
{
if (self.preventRendering)
{
return;
}
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext setActiveShaderProgram:filterProgram];
// Desktop GL needs point sprites and programmable point size explicitly
// enabled; they are always on in OpenGL ES 2.0.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#else
glEnable(GL_POINT_SPRITE);
glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
#endif
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
// Transparent background so the crosshairs can be composited over video.
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, crosshairCoordinates);
glDrawArrays(GL_POINTS, 0, (GLsizei)numberOfCrosshairs);
[self informTargetsAboutNewFrameAtTime:frameTime];
});
}
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
// Prevent rendering of the frame by normal means; this generator only draws
// via renderCrosshairsFromArray:count:frameTime:.
}
#pragma mark -
#pragma mark Accessors
// Stores the width and pushes it to the vertex shader uniform.
- (void)setCrosshairWidth:(CGFloat)newValue;
{
_crosshairWidth = newValue;
[self setFloat:_crosshairWidth forUniform:crosshairWidthUniform program:filterProgram];
}
// Pushes the RGB crosshair color to the fragment shader uniform.
- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
{
GPUVector3 crosshairColor = {redComponent, greenComponent, blueComponent};
[self setVec3:crosshairColor forUniform:crosshairColorUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,057
|
```objective-c
#import "GPUImageDivideBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Alpha-aware "divide" blend of the base image by the overlay, applied per
// channel. NOTE(review): the else-branches divide by overlay channel values;
// behavior when a channel is exactly 0.0 depends on GLSL division semantics —
// the guard only checks overlay.a, not the individual channels.
NSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
mediump float ra;
if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a)))
ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
else
ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
mediump float ga;
if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a)))
ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
else
ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
mediump float ba;
if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a)))
ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
else
ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
mediump float a = overlay.a + base.a - overlay.a * base.a;
gl_FragColor = vec4(ra, ga, ba, a);
}
);
#else
// Desktop variant: same logic without ES precision qualifiers.
NSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 base = texture2D(inputImageTexture, textureCoordinate);
vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
float ra;
if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a)))
ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
else
ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
float ga;
if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a)))
ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
else
ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
float ba;
if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a)))
ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
else
ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
float a = overlay.a + base.a - overlay.a * base.a;
gl_FragColor = vec4(ra, ga, ba, a);
}
);
#endif
@implementation GPUImageDivideBlendFilter

#pragma mark -
#pragma mark Initialization and teardown

/// Configures the two-input blend filter with the divide-blend fragment shader.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageDivideBlendFragmentShaderString];
    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 974
|
```objective-c
#import "GPUImageSaturationFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Saturation adjustment: mixes each pixel between its luminance (grayscale)
// and original color by the "saturation" uniform; alpha is passed through.
NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform lowp float saturation;
// Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp float luminance = dot(textureColor.rgb, luminanceWeighting);
lowp vec3 greyScaleColor = vec3(luminance);
gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
}
);
#else
// Desktop variant: same logic without ES precision qualifiers.
NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float saturation;
// Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
float luminance = dot(textureColor.rgb, luminanceWeighting);
vec3 greyScaleColor = vec3(luminance);
gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
}
);
#endif
@implementation GPUImageSaturationFilter

@synthesize saturation = _saturation;

#pragma mark -
#pragma mark Initialization and teardown

/// Compiles the saturation fragment shader, caches the uniform handle, and
/// starts at the neutral saturation of 1.0 (no change to the image).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageSaturationFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    saturationUniform = [filterProgram uniformIndex:@"saturation"];
    self.saturation = 1.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Stores the new saturation and pushes it to the shader uniform.
- (void)setSaturation:(CGFloat)newValue;
{
    _saturation = newValue;
    [self setFloat:newValue forUniform:saturationUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 493
|
```objective-c
#import "GPUImageFilter.h"
/// Generates a Perlin-noise image; the actual noise computation lives in the
/// fragment shader configured in the implementation file.
@interface GPUImagePerlinNoiseFilter : GPUImageFilter
{
// Handles for the shader uniforms backing the properties below.
GLint scaleUniform, colorStartUniform, colorFinishUniform;
}
// Colors fed to the shader — presumably the endpoints the noise value blends
// between; confirm against the fragment shader in the .m.
@property (readwrite, nonatomic) GPUVector4 colorStart;
@property (readwrite, nonatomic) GPUVector4 colorFinish;
// Scale of the noise pattern.
@property (readwrite, nonatomic) float scale;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 73
|
```objective-c
#import "GPUImageTwoPassTextureSamplingFilter.h"
/** A Gaussian blur filter
 Interpolated optimization based on Daniel Rákos' "Efficient Gaussian blur with
 linear sampling" (rastergrid.com; the original URL was stripped from this copy)
 */
@interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter
{
BOOL shouldResizeBlurRadiusWithImageSize;
CGFloat _blurRadiusInPixels;
}
/** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result.
 */
@property (readwrite, nonatomic) CGFloat texelSpacingMultiplier;
/** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
/** Setting these properties will allow the blur radius to scale with the size of the image. These properties are mutually exclusive; setting either will set the other to 0.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth;
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight;
/// The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
@property(readwrite, nonatomic) NSUInteger blurPasses;
/// Shader-source generators: "standard" emits one texture read per Gaussian
/// tap; "optimized" merges adjacent taps into interpolated reads (see the .m).
+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
/// Rebuilds the filter programs from new shader sources.
- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 391
|
```objective-c
#import "GPUImageAdaptiveThresholdFilter.h"
#import "GPUImageFilter.h"
#import "GPUImageTwoInputFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImageBoxBlurFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Thresholding shader: input 1 carries the box-blurred luminance (local average)
// and input 2 the sharp luminance — see the target wiring in -init below.
// Outputs white where the pixel is no more than 0.05 darker than its local
// average, black otherwise.
NSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
highp float blurredInput = texture2D(inputImageTexture, textureCoordinate).r;
highp float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r;
// step() yields 1.0 when localLuminance >= blurredInput - 0.05
highp float thresholdResult = step(blurredInput - 0.05, localLuminance);
gl_FragColor = vec4(vec3(thresholdResult), 1.0);
}
);
#else
// Desktop variant of the same shader (desktop GLSL takes no precision qualifiers).
NSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
float blurredInput = texture2D(inputImageTexture, textureCoordinate).r;
float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r;
float thresholdResult = step(blurredInput - 0.05, localLuminance);
gl_FragColor = vec4(vec3(thresholdResult), 1.0);
}
);
#endif
// Private state for GPUImageAdaptiveThresholdFilter.
@interface GPUImageAdaptiveThresholdFilter()
{
GPUImageBoxBlurFilter *boxBlurFilter; // retained so the blur-radius accessors can forward to it
}
@end
@implementation GPUImageAdaptiveThresholdFilter
#pragma mark -
#pragma mark Initialization and teardown
// Builds a three-stage group: grayscale -> box blur -> two-input threshold.
// The final stage compares each pixel's luminance to the blurred (locally
// averaged) luminance around it.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
// First pass: reduce to luminance
GPUImageGrayscaleFilter *luminanceFilter = [[GPUImageGrayscaleFilter alloc] init];
[self addFilter:luminanceFilter];
// Second pass: perform a box blur
boxBlurFilter = [[GPUImageBoxBlurFilter alloc] init];
[self addFilter:boxBlurFilter];
// Third pass: compare the blurred background luminance to the local value
GPUImageFilter *adaptiveThresholdFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageAdaptiveThresholdFragmentShaderString];
[self addFilter:adaptiveThresholdFilter];
// The blurred image should land in the threshold filter's first input slot and
// the sharp luminance in its second; assumes addTarget: order determines the
// slot — confirm against GPUImageTwoInputFilter.
[luminanceFilter addTarget:boxBlurFilter];
[boxBlurFilter addTarget:adaptiveThresholdFilter];
// To prevent double updating of this filter, disable updates from the sharp luminance image side
[luminanceFilter addTarget:adaptiveThresholdFilter];
self.initialFilters = [NSArray arrayWithObject:luminanceFilter];
self.terminalFilter = adaptiveThresholdFilter;
return self;
}
#pragma mark -
#pragma mark Accessors
// The blur radius is simply forwarded to the embedded box-blur stage.
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
boxBlurFilter.blurRadiusInPixels = newValue;
}
- (CGFloat)blurRadiusInPixels;
{
return boxBlurFilter.blurRadiusInPixels;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 684
|
```objective-c
#import "GPUImage3x3TextureSamplingFilter.h"
/** This uses Sobel edge detection to place a black border around objects,
and then it quantizes the colors present in the image to give a cartoon-like quality to the image.
*/
@interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter
{
GLint thresholdUniform, quantizationLevelsUniform; // shader uniform locations for the two properties below
}
/** The threshold at which to apply the edges, default of 0.2
 */
@property(readwrite, nonatomic) CGFloat threshold;
/** The levels of quantization for the posterization of colors within the scene, with a default of 10.0
 */
@property(readwrite, nonatomic) CGFloat quantizationLevels;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageToonFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 146
|
```objective-c
#import "GPUImageAlphaBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Blends the second input over the first; the per-pixel mix factor is the
// overlay pixel's alpha scaled by the mixturePercent uniform. The base image's
// alpha channel is passed through unchanged.
NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform lowp float mixturePercent;
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);
}
);
#else
// Desktop variant of the same shader (no precision qualifiers).
NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform float mixturePercent;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);
}
);
#endif
@implementation GPUImageAlphaBlendFilter
@synthesize mix = _mix;
#pragma mark -
#pragma mark Initialization and teardown
/// Sets up the two-input blend program and starts with an even 50/50 mix.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageAlphaBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
    self.mix = 0.5;

    return self;
}
#pragma mark -
#pragma mark Accessors
/// Degree to which the second image is blended over the first (0.0 = base image
/// only, 1.0 = overlay at full strength, still weighted by the overlay's alpha).
- (void)setMix:(CGFloat)mixValue;
{
    _mix = mixValue;
    [self setFloat:_mix forUniform:mixUniform program:filterProgram];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 413
|
```objective-c
#import "GPUImageFilterGroup.h"
@class GPUImageGaussianBlurFilter;
/** Sharpens an image via unsharp masking: the input is compared against a
 Gaussian-blurred copy of itself and the difference is amplified.
 */
@interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup
{
GPUImageGaussianBlurFilter *blurFilter; // produces the blurred comparison image
GPUImageFilter *unsharpMaskFilter; // combines the sharp input with the blurred version
}
// The blur radius of the underlying Gaussian blur. The default is 4.0.
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
// The strength of the sharpening, from 0.0 on up, with a default of 1.0
@property(readwrite, nonatomic) CGFloat intensity;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 120
|
```objective-c
#import "GPUImageTwoInputFilter.h"
/// Two-input saturation-style blend; the fragment shader implementing the blend
/// lives in the companion .m file.
@interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 26
|
```objective-c
#import "GPUImageFilter.h"
/// Adjusts the shadow and highlight tone ranges of an image independently.
@interface GPUImageHighlightShadowFilter : GPUImageFilter
{
GLint shadowsUniform, highlightsUniform; // shader uniform locations for the two properties below
}
/**
 * 0 - 1, increase to lighten shadows.
 * @default 0
 */
@property(readwrite, nonatomic) CGFloat shadows;
/**
 * 0 - 1, decrease to darken highlights.
 * @default 1
 */
@property(readwrite, nonatomic) CGFloat highlights;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 89
|
```objective-c
#import "GPUImageFASTCornerDetectionFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImage3x3TextureSamplingFilter.h"
#import "GPUImageNonMaximumSuppressionFilter.h"
// 14 total texture coordinates from vertex shader for non-dependent reads
// 3 texture coordinates for dependent reads, then
// Encodes, per pixel, which of the 8 surrounding samples are at least as bright
// as the center: each comparison contributes one bit, scaled into [0,1] as
// n/255 so the mask can be recovered from the red channel.
NSString *const kGPUImageFASTDetectorFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D lookupTable; // NOTE(review): declared but never sampled in this shader — confirm whether a lookup stage was planned
void main()
{
lowp float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
lowp float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
lowp float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
lowp float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
lowp float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
lowp float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
lowp float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
lowp float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
lowp float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
// step(center, neighbor) is 1.0 when the neighbor is >= the center
lowp float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);
byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);
byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);
byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);
byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);
byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);
byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);
byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);
// TODO: Replace the above with a dot product and two vec4s
// TODO: Apply step to a matrix, rather than individually
gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);
}
);
@implementation GPUImageFASTCornerDetectionFilter
// Convenience initializer: defaults to the 12-point contiguous,
// non-maximum-suppressed FAST variant.
- (id)init;
{
if (!(self = [self initWithFASTDetectorVariant:kGPUImageFAST12ContiguousNonMaximumSuppressed]))
{
return nil;
}
return self;
}
// NOTE(review): the detection pipeline below is entirely commented out, so
// detectorType is currently ignored and this filter group wires up no filters.
- (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;
{
if (!(self = [super init]))
{
return nil;
}
// [derivativeFilter addTarget:blurFilter];
// [blurFilter addTarget:harrisCornerDetectionFilter];
// [harrisCornerDetectionFilter addTarget:nonMaximumSuppressionFilter];
// [simpleThresholdFilter addTarget:colorPackingFilter];
// self.initialFilters = [NSArray arrayWithObjects:derivativeFilter, nil];
// self.terminalFilter = colorPackingFilter;
// self.terminalFilter = nonMaximumSuppressionFilter;
return self;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 797
|
```objective-c
#import "GPUImageFilter.h"
extern NSString *const kGPUImageTwoInputTextureVertexShaderString;
/// Base class for filters that combine two input framebuffers. The frame-time
/// bookkeeping below tracks which inputs have delivered frames before rendering.
@interface GPUImageTwoInputFilter : GPUImageFilter
{
GPUImageFramebuffer *secondInputFramebuffer; // most recent frame from input 2
GLint filterSecondTextureCoordinateAttribute; // attribute/uniform locations for the second texture
GLint filterInputTextureUniform2;
GPUImageRotationMode inputRotation2; // rotation applied to the second input's coordinates
CMTime firstFrameTime, secondFrameTime;
BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;
BOOL firstFrameCheckDisabled, secondFrameCheckDisabled;
}
/// Presumably disables the wait for a frame on the first input — confirm in the .m.
- (void)disableFirstFrameCheck;
/// Presumably disables the wait for a frame on the second input — confirm in the .m.
- (void)disableSecondFrameCheck;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 140
|
```objective-c
// adapted from unitzeroone - path_to_url
#import "GPUImageJFAVoronoiFilter.h"
// The shaders are mostly taken from UnitZeroOne's WebGL example here:
// path_to_url
// Computes the center plus 8 neighbor sampling coordinates (offset by
// +/- sampleStep) in the vertex shader, so the fragment shader only has to
// perform non-dependent texture reads.
NSString *const kGPUImageJFAVoronoiVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
uniform float sampleStep;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
void main()
{
gl_Position = position;
vec2 widthStep = vec2(sampleStep, 0.0);
vec2 heightStep = vec2(0.0, sampleStep);
vec2 widthHeightStep = vec2(sampleStep);
vec2 widthNegativeHeightStep = vec2(sampleStep, -sampleStep);
textureCoordinate = inputTextureCoordinate.xy;
leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;
topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;
bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;
bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;
bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;
}
);
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// One jump-flood step: examine the 8 neighbors at the current sample step and
// keep whichever encoded seed lies nearest to this fragment.
NSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING
(
precision highp float;
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec2 size;
//varying vec2 textureCoordinate;
//uniform float sampleStep;
// Reconstructs the pixel position encoded in a color: x/y carry the fine
// position and z appears to select a coarse 8x8 block; the result is then
// normalized by `size`. (Packing scheme inferred from the arithmetic —
// confirm against the point-encoding code elsewhere in the framework.)
vec2 getCoordFromColor(vec4 color)
{
float z = color.z * 256.0;
float yoff = floor(z / 8.0);
float xoff = mod(z, 8.0);
float x = color.x*256.0 + xoff*256.0;
float y = color.y*256.0 + yoff*256.0;
return vec2(x,y) / size;
}
void main(void) {
vec2 sub;
vec4 dst;
vec4 local = texture2D(inputImageTexture, textureCoordinate);
vec4 sam;
float l;
float smallestDist;
// alpha == 0.0 means no seed recorded here yet; start from the maximum
// squared distance of 1.0 (dot of scalars)
if(local.a == 0.0){
smallestDist = dot(1.0,1.0);
}else{
sub = getCoordFromColor(local)-textureCoordinate;
smallestDist = dot(sub,sub);
}
dst = local;
// For each neighbor carrying a valid seed (alpha == 1.0), adopt it when its
// seed is closer than the best found so far.
sam = texture2D(inputImageTexture, topRightTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, topTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, topLeftTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, bottomRightTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, bottomTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, leftTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, rightTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
gl_FragColor = dst;
}
);
#else
// Desktop variant of the same shader (no precision qualifier).
NSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D inputImageTexture;
uniform vec2 size;
//varying vec2 textureCoordinate;
//uniform float sampleStep;
// See the iOS branch above for a description of the packing scheme.
vec2 getCoordFromColor(vec4 color)
{
float z = color.z * 256.0;
float yoff = floor(z / 8.0);
float xoff = mod(z, 8.0);
float x = color.x*256.0 + xoff*256.0;
float y = color.y*256.0 + yoff*256.0;
return vec2(x,y) / size;
}
void main(void) {
vec2 sub;
vec4 dst;
vec4 local = texture2D(inputImageTexture, textureCoordinate);
vec4 sam;
float l;
float smallestDist;
if(local.a == 0.0){
smallestDist = dot(1.0,1.0);
}else{
sub = getCoordFromColor(local)-textureCoordinate;
smallestDist = dot(sub,sub);
}
dst = local;
sam = texture2D(inputImageTexture, topRightTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, topTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, topLeftTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, bottomRightTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, bottomTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, leftTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
sam = texture2D(inputImageTexture, rightTextureCoordinate);
if(sam.a == 1.0){
sub = (getCoordFromColor(sam)-textureCoordinate);
l = dot(sub,sub);
if(l < smallestDist){
smallestDist = l;
dst = sam;
}
}
gl_FragColor = dst;
}
);
#endif
// Private pass counter for the iterative jump-flood rendering.
@interface GPUImageJFAVoronoiFilter() {
int currentPass;
}
@end
@implementation GPUImageJFAVoronoiFilter
@synthesize sizeInPixels = _sizeInPixels;
// NOTE(review): sampleStepUniform, sizeUniform, numPasses, and
// secondFilterOutputTexture are presumably declared in the class header,
// which is not visible here — confirm.
- (id)init;
{
if (!(self = [super initWithVertexShaderFromString:kGPUImageJFAVoronoiVertexShaderString fragmentShaderFromString:kGPUImageJFAVoronoiFragmentShaderString]))
{
NSLog(@"nil returned");
return nil;
}
sampleStepUniform = [filterProgram uniformIndex:@"sampleStep"];
sizeUniform = [filterProgram uniformIndex:@"size"];
//[self disableSecondFrameCheck];
return self;
}
// Stores the point-texture size and pushes it to the shader. Rejects textures
// that are not square powers of two.
-(void)setSizeInPixels:(CGSize)sizeInPixels {
_sizeInPixels = sizeInPixels;
//validate that it's a power of 2
float width = log2(sizeInPixels.width);
float height = log2(sizeInPixels.height);
// equal log2 values implies equal dimensions, i.e. a square texture
if (width != height) {
NSLog(@"Voronoi point texture must be square");
return;
}
if (width != floor(width) || height != floor(height)) {
NSLog(@"Voronoi point texture must be a power of 2. Texture size: %f, %f", sizeInPixels.width, sizeInPixels.height);
return;
}
// NOTE(review): glUniform2f affects whichever program is currently bound;
// confirm filterProgram is active whenever this setter runs.
glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
}
#pragma mark -
#pragma mark Managing the display FBOs
// Rounds the larger of the two point coordinates up to the next power of two
// by smearing the high bit rightward. The shifts propagate through 16 bits, so
// this is correct for values up to 2^32; an input of 0 wraps via the initial
// decrement.
-(NSUInteger)nextPowerOfTwo:(CGPoint)input {
NSUInteger val;
if (input.x > input.y) {
val = (NSUInteger)input.x;
} else {
val = (NSUInteger)input.y;
}
val--;
val = (val >> 1) | val;
val = (val >> 2) | val;
val = (val >> 4) | val;
val = (val >> 8) | val;
val = (val >> 16) | val;
val++;
return val;
}
//- (void)setOutputFBO;
//{
//    if (currentPass % 2 == 1) {
//        [self setSecondFilterFBO];
//    } else {
//        [self setFilterFBO];
//    }
//
//}
// Performs the jump-flood iteration: an initial draw from the input texture at
// sampleStep 0.5, then numPasses + 1 refinement draws with the step halving
// each pass (2^-pass).
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
// Run the first stage of the two-pass filter
[GPUImageContext setActiveShaderProgram:filterProgram];
currentPass = 0;
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
glActiveTexture(GL_TEXTURE2);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUniform1f(sampleStepUniform, 0.5);
glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform, 2);
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
// NOTE(review): only outputFramebuffer is ever activated in this method, yet
// even passes sample secondFilterOutputTexture and odd passes sample the very
// texture being rendered into — the ping-pong scheme looks incomplete after
// the framebuffer-cache migration. Confirm before relying on this filter.
for (int pass = 1; pass <= numPasses + 1; pass++) {
currentPass = pass;
// [self setOutputFBO];
//glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glActiveTexture(GL_TEXTURE2);
if (pass % 2 == 0) {
glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture);
} else {
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
}
glUniform1i(filterInputTextureUniform, 2);
float step = pow(2.0, numPasses - pass) / pow(2.0, numPasses);
glUniform1f(sampleStepUniform, step);
glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 3,165
|
```objective-c
#import "GPUImageKuwaharaFilter.h"
// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. "Anisotropic Kuwahara Filtering on the GPU," GPU Pro p.247 (2010).
//
// Original header:
//
// Anisotropic Kuwahara Filtering on the GPU
// by Jan Eric Kyprianidis <www.kyprianidis.com>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Kuwahara smoothing: compute mean and variance for the four radius x radius
// quadrants overlapping each pixel and output the mean of the least-varying
// quadrant, which smooths noise while preserving edges.
NSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform int radius;
precision highp float;
// NOTE(review): texel size is hard-coded for a 768x1024 input; other input
// sizes will be mis-sampled — confirm this is intended.
const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
void main (void)
{
vec2 uv = textureCoordinate;
float n = float((radius + 1) * (radius + 1));
int i; int j;
vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
vec3 c;
// Accumulate per-channel sum (m*) and sum of squares (s*) for each quadrant.
for (j = -radius; j <= 0; ++j) {
for (i = -radius; i <= 0; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m0 += c;
s0 += c * c;
}
}
for (j = -radius; j <= 0; ++j) {
for (i = 0; i <= radius; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m1 += c;
s1 += c * c;
}
}
for (j = 0; j <= radius; ++j) {
for (i = 0; i <= radius; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m2 += c;
s2 += c * c;
}
}
for (j = 0; j <= radius; ++j) {
for (i = -radius; i <= 0; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m3 += c;
s3 += c * c;
}
}
// Emit the mean of the quadrant with the smallest summed channel variance.
float min_sigma2 = 1e+2;
m0 /= n;
s0 = abs(s0 / n - m0 * m0);
float sigma2 = s0.r + s0.g + s0.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m0, 1.0);
}
m1 /= n;
s1 = abs(s1 / n - m1 * m1);
sigma2 = s1.r + s1.g + s1.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m1, 1.0);
}
m2 /= n;
s2 = abs(s2 / n - m2 * m2);
sigma2 = s2.r + s2.g + s2.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m2, 1.0);
}
m3 /= n;
s3 = abs(s3 / n - m3 * m3);
sigma2 = s3.r + s3.g + s3.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m3, 1.0);
}
}
);
#else
// Desktop variant of the same shader (no precision qualifiers).
NSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform int radius;
const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
void main (void)
{
vec2 uv = textureCoordinate;
float n = float((radius + 1) * (radius + 1));
int i; int j;
vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
vec3 c;
for (j = -radius; j <= 0; ++j) {
for (i = -radius; i <= 0; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m0 += c;
s0 += c * c;
}
}
for (j = -radius; j <= 0; ++j) {
for (i = 0; i <= radius; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m1 += c;
s1 += c * c;
}
}
for (j = 0; j <= radius; ++j) {
for (i = 0; i <= radius; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m2 += c;
s2 += c * c;
}
}
for (j = 0; j <= radius; ++j) {
for (i = -radius; i <= 0; ++i) {
c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
m3 += c;
s3 += c * c;
}
}
float min_sigma2 = 1e+2;
m0 /= n;
s0 = abs(s0 / n - m0 * m0);
float sigma2 = s0.r + s0.g + s0.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m0, 1.0);
}
m1 /= n;
s1 = abs(s1 / n - m1 * m1);
sigma2 = s1.r + s1.g + s1.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m1, 1.0);
}
m2 /= n;
s2 = abs(s2 / n - m2 * m2);
sigma2 = s2.r + s2.g + s2.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m2, 1.0);
}
m3 /= n;
s3 = abs(s3 / n - m3 * m3);
sigma2 = s3.r + s3.g + s3.b;
if (sigma2 < min_sigma2) {
min_sigma2 = sigma2;
gl_FragColor = vec4(m3, 1.0);
}
}
);
#endif
@implementation GPUImageKuwaharaFilter
@synthesize radius = _radius;
#pragma mark -
#pragma mark Initialization and teardown
/// Builds the filter around the Kuwahara fragment shader and applies the
/// default sampling radius of 3.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    radiusUniform = [filterProgram uniformIndex:@"radius"];
    self.radius = 3;

    return self;
}
#pragma mark -
#pragma mark Accessors
/// Stores the sampling radius and pushes it to the shader's integer uniform.
- (void)setRadius:(NSUInteger)radiusValue;
{
    _radius = radiusValue;
    [self setInteger:(GLint)_radius forUniform:radiusUniform program:filterProgram];
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,926
|
```objective-c
#import "GPUImageTwoInputFilter.h"
/// Remaps colors through a 512x512 lookup texture supplied as the second input.
@interface GPUImageLookupFilter : GPUImageTwoInputFilter
{
GLint intensityUniform; // shader uniform location for the intensity property below
}
// How To Use:
// 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources.
// For this to work properly each pixel color must not depend on other pixels (e.g. blur will not work).
// If you need more complex filter you can create as many lookup tables as required.
// E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png
// 2) Use you new lookup.png file as a second input for GPUImageLookupFilter.
// See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for example.
// Additional Info:
// Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors:
//for (int by = 0; by < 8; by++) {
//    for (int bx = 0; bx < 8; bx++) {
//        for (int g = 0; g < 64; g++) {
//            for (int r = 0; r < 64; r++) {
//                image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5),
//                                                             (int)(g * 255.0 / 63.0 + 0.5),
//                                                             (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5)));
//            }
//        }
//    }
//}
// Opacity/intensity of lookup filter ranges from 0.0 to 1.0, with 1.0 as the normal setting
@property(readwrite, nonatomic) CGFloat intensity;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLookupFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 415
|
```objective-c
#import "GPUImageSourceOverBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Composites the second input over the first, weighted by the overlay's alpha
// (Porter-Duff "source over", with input 2 as the source).
NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 void main()
 {
     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     // Bug fix: sample the second input with its own interpolated coordinate.
     // The original read it with textureCoordinate, leaving the declared
     // varying textureCoordinate2 unused and misaligning the overlay whenever
     // the second input carries a different rotation than the first.
     lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
 }
);
#else
// Desktop variant of the same shader (no precision qualifiers).
NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;
 void main()
 {
     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
     // Bug fix applied here as well: use textureCoordinate2 for the second input.
     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
 }
);
#endif
@implementation GPUImageSourceOverBlendFilter
/// Configures the two-input blend with the source-over fragment shader.
/// No additional uniforms are required beyond those the superclass wires up.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageSourceOverBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }
    return self;
}
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 306
|
```objective-c
#import "GPUImageFilter.h"
/// Renders the image as a black-on-white crosshatch sketch pattern.
@interface GPUImageCrosshatchFilter : GPUImageFilter
{
GLint crossHatchSpacingUniform, lineWidthUniform; // shader uniform locations for the two properties below
}
// The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.
@property(readwrite, nonatomic) CGFloat crossHatchSpacing;
// A relative width for the crosshatch lines. The default is 0.003.
@property(readwrite, nonatomic) CGFloat lineWidth;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 101
|
```objective-c
#import <Foundation/Foundation.h>
#import "GPUImageContext.h"
@protocol GPUImageTextureOutputDelegate;
/// Exposes frames rendered by a GPUImage chain as a raw OpenGL texture for
/// consumption by external GL code.
@interface GPUImageTextureOutput : NSObject <GPUImageInput>
{
GPUImageFramebuffer *firstInputFramebuffer; // most recent frame received from the upstream filter
}
// Notified whenever a new frame is ready. unsafe_unretained: the delegate's
// owner must ensure it outlives this object.
@property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate;
@property(readonly) GLuint texture;
@property(nonatomic) BOOL enabled;
/// Called by the consumer when it has finished reading the texture
/// (presumably releases the held framebuffer — confirm in the .m).
- (void)doneWithTexture;
@end
// Callback protocol for consumers of GPUImageTextureOutput frames.
@protocol GPUImageTextureOutputDelegate
- (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageTextureOutput.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 115
|
```objective-c
#import "GPUImageLuminosity.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// First reduction pass (OpenGL ES): converts RGB to luminance using the
// Rec. 709 weights in W while averaging a 2x2 block of input texels into a
// single output texel. Note: // comments inside SHADER_STRING are stripped by
// the preprocessor before stringization, so they never reach the GLSL source.
NSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING
(
precision highp float;
uniform sampler2D inputImageTexture;
varying highp vec2 outputTextureCoordinate;
varying highp vec2 upperLeftInputTextureCoordinate;
varying highp vec2 upperRightInputTextureCoordinate;
varying highp vec2 lowerLeftInputTextureCoordinate;
varying highp vec2 lowerRightInputTextureCoordinate;
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
highp float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W);
highp float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W);
highp float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W);
highp float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W);
highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
}
);
// Subsequent reduction passes (OpenGL ES): input is already grayscale, so only
// the red channel is sampled and the 2x2 neighborhood is averaged.
NSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING
(
precision highp float;
uniform sampler2D inputImageTexture;
varying highp vec2 outputTextureCoordinate;
varying highp vec2 upperLeftInputTextureCoordinate;
varying highp vec2 upperRightInputTextureCoordinate;
varying highp vec2 lowerLeftInputTextureCoordinate;
varying highp vec2 lowerRightInputTextureCoordinate;
void main()
{
highp float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
highp float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
highp float lowerLeftLuminance = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
highp float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;
highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
}
);
#else
// Desktop GL variants of the same two shaders: identical logic, but without
// the ES-only precision qualifiers.
NSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying vec2 outputTextureCoordinate;
varying vec2 upperLeftInputTextureCoordinate;
varying vec2 upperRightInputTextureCoordinate;
varying vec2 lowerLeftInputTextureCoordinate;
varying vec2 lowerRightInputTextureCoordinate;
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
void main()
{
float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W);
float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W);
float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W);
float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W);
float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
}
);
NSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING
(
uniform sampler2D inputImageTexture;
varying vec2 outputTextureCoordinate;
varying vec2 upperLeftInputTextureCoordinate;
varying vec2 upperRightInputTextureCoordinate;
varying vec2 lowerLeftInputTextureCoordinate;
varying vec2 lowerRightInputTextureCoordinate;
void main()
{
float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
float lowerLeftLuminance = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;
float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
}
);
#endif
@implementation GPUImageLuminosity

@synthesize luminosityProcessingFinishedBlock = _luminosityProcessingFinishedBlock;

#pragma mark -
#pragma mark Initialization and teardown

/// Sets up the two programs used for the pyramid luminance reduction:
/// the inherited filterProgram (first pass: RGB -> Rec. 709 luminance plus a
/// 2x2 average) and secondFilterProgram (later passes: pure 2x2 averaging of
/// the red channel). After each processed frame, the final averaged value is
/// read back and delivered through luminosityProcessingFinishedBlock.
- (id)init;
{
    if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageInitialLuminosityFragmentShaderString]))
    {
        return nil;
    }

    texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
    texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];

    // NOTE(review): __unsafe_unretained breaks the retain cycle between self and
    // the stored completion block, but it leaves a dangling pointer if the block
    // ever outlives the filter; __weak would be safer on modern deployment
    // targets — confirm the minimum OS this pod supports before changing.
    __unsafe_unretained GPUImageLuminosity *weakSelf = self;
    [self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
        [weakSelf extractLuminosityAtFrameTime:frameTime];
    }];

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageColorAveragingVertexShaderString fragmentShaderString:kGPUImageLuminosityFragmentShaderString];

        if (!secondFilterProgram.initialized)
        {
            [self initializeSecondaryAttributes];

            if (![secondFilterProgram link])
            {
                NSString *progLog = [secondFilterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [secondFilterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [secondFilterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                // Fix: the program that failed to link is secondFilterProgram.
                // The original code cleared filterProgram, which had already
                // linked successfully in super's initializer (copy-paste slip).
                secondFilterProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader

        secondFilterTexelWidthUniform = [secondFilterProgram uniformIndex:@"texelWidth"];
        secondFilterTexelHeightUniform = [secondFilterProgram uniformIndex:@"texelHeight"];

        [GPUImageContext setActiveShaderProgram:secondFilterProgram];

        glEnableVertexAttribArray(secondFilterPositionAttribute);
        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
    });

    return self;
}

/// Registers the vertex attribute names the secondary (reduction) program
/// binds; must run before that program is linked.
- (void)initializeSecondaryAttributes;
{
    [secondFilterProgram addAttribute:@"position"];
    [secondFilterProgram addAttribute:@"inputTextureCoordinate"];
}

/*
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    // Do an initial render pass that both convert to luminance and reduces
    [GPUImageContext setActiveShaderProgram:filterProgram];

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    GLuint currentFramebuffer = [[stageFramebuffers objectAtIndex:0] intValue];
    glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    CGSize currentStageSize = [[stageSizes objectAtIndex:0] CGSizeValue];
#else
    NSSize currentStageSize = [[stageSizes objectAtIndex:0] sizeValue];
#endif
    glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height);

    GLuint currentTexture = [firstInputFramebuffer texture];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, currentTexture);

    glUniform1i(filterInputTextureUniform, 2);

    glUniform1f(texelWidthUniform, 0.5 / currentStageSize.width);
    glUniform1f(texelHeightUniform, 0.5 / currentStageSize.height);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    currentTexture = [[stageTextures objectAtIndex:0] intValue];

    // Just perform reductions from this point on
    [GPUImageContext setActiveShaderProgram:secondFilterProgram];
    glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    NSUInteger numberOfStageFramebuffers = [stageFramebuffers count];
    for (NSUInteger currentStage = 1; currentStage < numberOfStageFramebuffers; currentStage++)
    {
        currentFramebuffer = [[stageFramebuffers objectAtIndex:currentStage] intValue];
        glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        currentStageSize = [[stageSizes objectAtIndex:currentStage] CGSizeValue];
#else
        currentStageSize = [[stageSizes objectAtIndex:currentStage] sizeValue];
#endif
        glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height);

        glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);

        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, currentTexture);

        glUniform1i(secondFilterInputTextureUniform, 2);

        glUniform1f(secondFilterTexelWidthUniform, 0.5 / currentStageSize.width);
        glUniform1f(secondFilterTexelHeightUniform, 0.5 / currentStageSize.height);

        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        currentTexture = [[stageTextures objectAtIndex:currentStage] intValue];
    }

    [firstInputFramebuffer unlock];
}
*/

#pragma mark -
#pragma mark Callbacks

/// Reads the final reduction stage back from the GPU, averages the red channel
/// of every remaining pixel, normalizes to [0, 1], and reports the result
/// (with the frame's timestamp) via luminosityProcessingFinishedBlock.
- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        // we need a normal color texture for this filter
        NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
        NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");

        NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height);

        // NOTE(review): the buffer is sized once, on first use; if finalStageSize
        // ever grew afterwards this read would overflow — verify that the stage
        // size is fixed for the lifetime of the filter.
        if (rawImagePixels == NULL)
        {
            rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4);
        }

        [GPUImageContext useImageProcessingContext];
        [outputFramebuffer activateFramebuffer];

        glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);

        // The shaders write identical R/G/B, so summing the red byte of each
        // RGBA pixel is sufficient.
        NSUInteger luminanceTotal = 0;
        NSUInteger byteIndex = 0;
        for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
        {
            luminanceTotal += rawImagePixels[byteIndex];
            byteIndex += 4;
        }

        CGFloat normalizedLuminosityTotal = (CGFloat)luminanceTotal / (CGFloat)totalNumberOfPixels / 255.0;

        if (_luminosityProcessingFinishedBlock != NULL)
        {
            _luminosityProcessingFinishedBlock(normalizedLuminosityTotal, frameTime);
        }
    });
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageLuminosity.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 3,336
|
```objective-c
#import "GPUImageColorBurnBlendFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Color-burn blend (OpenGL ES): per channel, result = 1 - (1 - base) / overlay.
// Darkens the base image in proportion to the darkness of the overlay.
NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
mediump vec4 whiteColor = vec4(1.0);
gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
}
);
#else
// Desktop GL variant: identical logic without the ES precision qualifiers.
NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
vec4 whiteColor = vec4(1.0);
gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
}
);
#endif
@implementation GPUImageColorBurnBlendFilter

/// Creates a two-input blend filter driven by the color-burn fragment shader
/// declared above; all per-frame work is inherited from the superclass.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageColorBurnBlendFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 333
|
```objective-c
#import "GPUImageBulgeDistortionFilter.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Bulge distortion (OpenGL ES): texels within `radius` of `center` are pulled
// toward (or pushed away from) the center by a squared falloff controlled by
// `scale`; the distance test is done in aspect-corrected coordinates so the
// bulge stays circular on non-square inputs.
NSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float aspectRatio;
uniform highp vec2 center;
uniform highp float radius;
uniform highp float scale;
void main()
{
highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
highp float dist = distance(center, textureCoordinateToUse);
textureCoordinateToUse = textureCoordinate;
if (dist < radius)
{
textureCoordinateToUse -= center;
highp float percent = 1.0 - ((radius - dist) / radius) * scale;
percent = percent * percent;
textureCoordinateToUse = textureCoordinateToUse * percent;
textureCoordinateToUse += center;
}
gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
}
);
#else
// Desktop GL variant: identical logic without the ES precision qualifiers.
NSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform float aspectRatio;
uniform vec2 center;
uniform float radius;
uniform float scale;
void main()
{
vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
float dist = distance(center, textureCoordinateToUse);
textureCoordinateToUse = textureCoordinate;
if (dist < radius)
{
textureCoordinateToUse -= center;
float percent = 1.0 - ((radius - dist) / radius) * scale;
percent = percent * percent;
textureCoordinateToUse = textureCoordinateToUse * percent;
textureCoordinateToUse += center;
}
gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
}
);
#endif
@interface GPUImageBulgeDistortionFilter ()

- (void)adjustAspectRatio;

@property (readwrite, nonatomic) CGFloat aspectRatio;

@end

@implementation GPUImageBulgeDistortionFilter

@synthesize aspectRatio = _aspectRatio;
@synthesize center = _center;
@synthesize radius = _radius;
@synthesize scale = _scale;

#pragma mark -
#pragma mark Initialization and teardown

/// Caches the uniform locations of the bulge shader and applies the stock
/// starting values: radius 0.25, scale 0.5, center at the middle of the image.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kGPUImageBulgeDistortionFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
    radiusUniform = [filterProgram uniformIndex:@"radius"];
    scaleUniform = [filterProgram uniformIndex:@"scale"];
    centerUniform = [filterProgram uniformIndex:@"center"];

    self.radius = 0.25;
    self.scale = 0.5;
    self.center = CGPointMake(0.5, 0.5);

    return self;
}

#pragma mark -
#pragma mark Accessors

/// Pushes a fresh aspect-ratio uniform derived from the current input size,
/// flipping the ratio when the input rotation swaps width and height.
- (void)adjustAspectRatio;
{
    BOOL dimensionsAreSwapped = GPUImageRotationSwapsWidthAndHeight(inputRotation);
    CGFloat ratio = dimensionsAreSwapped
        ? (inputTextureSize.width / inputTextureSize.height)
        : (inputTextureSize.height / inputTextureSize.width);
    [self setAspectRatio:ratio];
}

- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    [super forceProcessingAtSize:frameSize];
    [self adjustAspectRatio];
}

/// Recomputes the aspect ratio whenever the effective input size actually
/// changes to a non-zero value.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    CGSize previousSize = inputTextureSize;
    [super setInputSize:newSize atIndex:textureIndex];

    BOOL sizeChanged = !CGSizeEqualToSize(previousSize, inputTextureSize);
    BOOL sizeIsNonZero = !CGSizeEqualToSize(newSize, CGSizeZero);
    if (sizeChanged && sizeIsNonZero)
    {
        [self adjustAspectRatio];
    }
}

- (void)setAspectRatio:(CGFloat)newAspectRatio;
{
    _aspectRatio = newAspectRatio;
    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];
}

/// Rotation changes both where the center lands in texture space and which
/// dimension is "width", so re-apply both dependent uniforms.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    [super setInputRotation:newInputRotation atIndex:textureIndex];
    [self setCenter:self.center];
    [self adjustAspectRatio];
}

- (void)setRadius:(CGFloat)newRadius;
{
    _radius = newRadius;
    [self setFloat:_radius forUniform:radiusUniform program:filterProgram];
}

- (void)setScale:(CGFloat)newScale;
{
    _scale = newScale;
    [self setFloat:_scale forUniform:scaleUniform program:filterProgram];
}

/// Stores the unrotated center, but uploads the rotation-adjusted point so the
/// shader sees coordinates in the orientation it samples in.
- (void)setCenter:(CGPoint)newCenter;
{
    _center = newCenter;
    CGPoint rotatedCenter = [self rotatedPoint:_center forRotation:inputRotation];
    [self setPoint:rotatedCenter forUniform:centerUniform program:filterProgram];
}

@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.m
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 1,058
|
```objective-c
#import "GPUImageTwoInputFilter.h"

/// Two-input filter that masks the first input using the second; the masking
/// logic lives entirely in the corresponding .m's fragment shader.
@interface GPUImageMaskFilter : GPUImageTwoInputFilter
@end
```
|
/content/code_sandbox/Pods/GPUImage/framework/Source/GPUImageMaskFilter.h
|
objective-c
| 2016-02-02T02:51:55
| 2024-08-09T08:55:27
|
WMPlayer
|
zhengwenming/WMPlayer
| 3,272
| 24
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.