pili GPUImage demo
//
//  PLGPUImageFilterViewController.m
//  PLStreamingKit
//
//  Created by 0dayZh on 16/3/8.
//  Copyright © 2016 0dayZh. All rights reserved.
//

#import "PLGPUImageFilterViewController.h"
#import "GPUImage.h"
#import <PLStreamingKit/PLStreamingKit.h>
#import "YHTestFilter.h"
//#import "YHBeautySkinFilter.h"

extern const char *stateNames[];
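
// The RemoteIO audio unit invokes this render callback whenever microphone
// data is available. AudioUnitRender pulls the captured PCM samples into a
// locally described AudioBufferList, and the samples are then handed to the
// PLStreamingKit session for encoding and publishing.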
static OSStatus handleInputBuffer(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    @autoreleasepool {
        PLGPUImageFilterViewController *ref = (__bridge PLGPUImageFilterViewController *)inRefCon;

        // Leave mData NULL so AudioUnitRender allocates the data buffer.
        AudioBuffer buffer;
        buffer.mData = NULL;
        buffer.mDataByteSize = 0;
        buffer.mNumberChannels = 2;

        AudioBufferList buffers;
        buffers.mNumberBuffers = 1;
        buffers.mBuffers[0] = buffer;

        OSStatus status = AudioUnitRender(ref.componetInstance,
                                          ioActionFlags,
                                          inTimeStamp,
                                          inBusNumber,
                                          inNumberFrames,
                                          &buffers);
        if (noErr == status) {
            AudioBuffer audioBuffer = buffers.mBuffers[0];
            [ref.session pushAudioBuffer:&audioBuffer];
        }
        return status;
    }
}
@interface PLGPUImageFilterViewController ()
<
PLStreamingSessionDelegate,
PLStreamingSendingBufferDelegate
>

@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;
//@property (nonatomic, strong) PLStreamingSession *session;

@end
@implementation PLGPUImageFilterViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    [self setupGPUImage];
    [self setupPili];
    [self initMicrophoneSource];
}
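
// Overview: setupGPUImage builds the capture/filter chain
// (camera -> filter -> on-screen view + raw BGRA output), setupPili creates
// the PLStreamingKit session that publishes the filtered frames, and
// initMicrophoneSource configures a RemoteIO audio unit that feeds
// microphone PCM into the same session.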
- (void)setupGPUImage {
    GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

//    GPUImageSketchFilter *filter = [[GPUImageSketchFilter alloc] init];
//    YHTestFilter *filter = [[YHTestFilter alloc] init];
    GPUImageColorInvertFilter *filter = [[GPUImageColorInvertFilter alloc] init];
//    YHBeautySkinFilter *filter = [[YHBeautySkinFilter alloc] init];
//    [filter setBeautyParamsLevel:LIVE_BEAUTY_LEVEL5];
//    [filter setOffsetWidth:4.0/480.0 height:4.0/640.0];

//    __weak typeof(self) wself = self;
//    filter.frameProcessingCompletionBlock = ^(GPUImageOutput *output, CMTime time) {
//        __strong typeof(wself) strongSelf = wself;
//        if (strongSelf && PLStreamStateConnected == strongSelf.session.streamState) {
//            GPUImageFramebuffer *imageFramebuffer = output.framebufferForOutput;
//            CVPixelBufferRef pixelBuffer = [imageFramebuffer renderTarget];
//
//            if (pixelBuffer) {
//                CVPixelBufferLockBaseAddress(pixelBuffer, 0);
//                CVPixelBufferRetain(pixelBuffer);
//
//                [strongSelf.session pushPixelBuffer:pixelBuffer completion:^{
//                    CVPixelBufferRelease(pixelBuffer);
//                    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
//                }];
//            }
//        }
//    };

    CGRect bounds = [UIScreen mainScreen].bounds;
    CGFloat width = CGRectGetWidth(bounds);
    CGFloat height = width * 640.0 / 480.0;
    GPUImageView *filteredVideoView = [[GPUImageView alloc] initWithFrame:(CGRect){0, 64, width, height}];

    // Add the view somewhere so it's visible
    [self.view addSubview:filteredVideoView];

    [videoCamera addTarget:filter];
    [filter addTarget:filteredVideoView];

    GPUImageRawDataOutput *rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(480, 640) resultsInBGRAFormat:YES];
    [filter addTarget:rawDataOutput];

    __unsafe_unretained GPUImageRawDataOutput *weakOutput = rawDataOutput;
    __weak typeof(self) wself = self;
    [rawDataOutput setNewFrameAvailableBlock:^{
        __strong typeof(wself) strongSelf = wself;
        if (!strongSelf) {
            return;
        }
        [weakOutput lockFramebufferForReading];
        GLubyte *outputBytes = [weakOutput rawBytesForImage];
        NSInteger bytesPerRow = [weakOutput bytesPerRowInOutput];
        CVPixelBufferRef pixelBuffer = NULL;
        // Note: this wraps rawDataOutput's bytes without copying them, so the
        // contents may change once the framebuffer is unlocked below. See the
        // copying sketch after this method for a safer variant.
        CVPixelBufferCreateWithBytes(kCFAllocatorDefault, 480, 640, kCVPixelFormatType_32BGRA, outputBytes, bytesPerRow, NULL, NULL, NULL, &pixelBuffer);
        [weakOutput unlockFramebufferAfterReading];
        if (pixelBuffer == NULL) {
            return;
        }
        [strongSelf.session pushPixelBuffer:pixelBuffer completion:^{
            CVPixelBufferRelease(pixelBuffer);
        }];
    }];

    [videoCamera startCameraCapture];
    self.videoCamera = videoCamera;
}
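
// A minimal sketch, not part of the original demo: copy the raw BGRA bytes
// into a CVPixelBufferRef that owns its backing store, so the pixels stay
// valid after the GPUImage framebuffer is unlocked. The function name
// PLCreateCopiedPixelBuffer is hypothetical; the caller owns the result.
static CVPixelBufferRef PLCreateCopiedPixelBuffer(const GLubyte *bytes, size_t width, size_t height, size_t bytesPerRow) {
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
    if (kCVReturnSuccess != ret) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *dst = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t dstBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    for (size_t row = 0; row < height; row++) {
        // Copy row by row, respecting each buffer's own stride.
        memcpy(dst + row * dstBytesPerRow, bytes + row * bytesPerRow, MIN(bytesPerRow, dstBytesPerRow));
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return pixelBuffer;
}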
- (void)setupPili {
    PLVideoStreamingConfiguration *videoConfiguration = [PLVideoStreamingConfiguration configurationWithVideoSize:CGSizeMake(480, 640) videoQuality:kPLVideoStreamingQualityMedium1];
    PLAudioStreamingConfiguration *audioConfiguration = [PLAudioStreamingConfiguration defaultConfiguration];

#warning You need to set streamJSON to a stream created on your own server
//    NSDictionary *streamJSON =
//    @{@"id":@"z1.pilitest.56cfca1075b6255cdb029db6",
//      @"createdAt":@"2016-02-26T11:44:16.975+08:00",
//      @"updatedAt":@"2016-02-26T11:44:16.975+08:00",
//      @"title":@"56cfca1075b6255cdb029db6",
//      @"hub":@"pilitest",
//      @"disabledTill":@(0),
//      @"disabled":@(NO),
//      @"publishKey":@"6eeee8a82246636e",
//      @"publishSecurity":@"static",
//      @"hosts":@{
//              @"publish":@{@"rtmp":@"pili-publish.pilitest.qiniucdn.com"},
//              @"live":@{
//                      @"hdl":@"pili-live-hdl.pilitest.qiniucdn.com",
//                      @"hls":@"pili-live-hls.pilitest.qiniucdn.com",
//                      @"http":@"pili-live-hls.pilitest.qiniucdn.com",
//                      @"rtmp":@"pili-live-rtmp.pilitest.qiniucdn.com"
//                      },
//              @"playback":@{@"hls":@"pili-playback.pilitest.qiniucdn.com",
//                            @"http":@"pili-playback.pilitest.qiniucdn.com"
//                            },
//              @"play":@{
//                      @"http":@"pili-live-hls.pilitest.qiniucdn.com",
//                      @"rtmp":@"pili-live-rtmp.pilitest.qiniucdn.com"
//                      }
//              }
//      };
    NSDictionary *streamJSON = @{@"id": @"z1.dayzhtest.test",
                                 @"title": @"test",
                                 @"hub": @"dayzhtest",
                                 @"publishKey": @"25d79b15-4b50-4fb1-af2e-9898e3c19df5",
                                 @"publishSecurity": @"static", // or dynamic
                                 @"disabled": @(NO),
                                 @"profiles": @[], // may be an empty array
                                 @"hosts": @{
                                         @"publish": @{
                                                 @"rtmp": @"vlv5lt.publish.z1.pili.qiniup.com"
                                                 },
                                         @"play": @{
                                                 @"rtmp": @"vlv5lt.live-rtmp.z1.pili.qiniucdn.com"
                                                 }
                                         }
                                 };
    PLStream *stream = [PLStream streamWithJSON:streamJSON];

//    self.session = [[PLStreamingSession alloc] initWithVideoConfiguration:videoConfiguration
//                                                       audioConfiguration:nil
//                                                                   stream:stream];
//    self.session.delegate = self;
    self.session = [[PLStreamingSession alloc] initWithVideoConfiguration:videoConfiguration audioConfiguration:audioConfiguration stream:stream];
    self.session.delegate = self;
}
static void setSamplerate() {
    NSError *err = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    // Check the return value rather than the error pointer; err is only
    // guaranteed to be meaningful when the call reports failure.
    if (![session setPreferredSampleRate:session.sampleRate error:&err]) {
        NSString *log = [NSString stringWithFormat:@"set samplerate failed, %@", err];
        NSLog(@"%@", log);
        return;
    }
    if (![session setActive:YES error:&err]) {
        NSString *log = @"Failed to set audio session active.";
        NSLog(@"%@ %@", log, err);
    }
}
- (void)initMicrophoneSource {
    __weak typeof(self) wself = self;
    void (^permissionGranted)(void) = ^{
        __strong typeof(wself) strongSelf = wself;
        if (!strongSelf) {
            return;
        }
        AVAudioSession *session = [AVAudioSession sharedInstance];
        NSError *error = nil;
        [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionMixWithOthers error:&error];
        if (![session setActive:YES error:&error]) {
            NSString *log = @"Failed to set audio session active.";
            NSLog(@"%@", log);
            return;
        }

        // Find the RemoteIO audio unit, which provides microphone access.
        AudioComponentDescription acd;
        acd.componentType = kAudioUnitType_Output;
        acd.componentSubType = kAudioUnitSubType_RemoteIO;
        acd.componentManufacturer = kAudioUnitManufacturer_Apple;
        acd.componentFlags = 0;
        acd.componentFlagsMask = 0;
        strongSelf.component = AudioComponentFindNext(NULL, &acd);

        OSStatus status = noErr;
        status = AudioComponentInstanceNew(strongSelf.component, &strongSelf->_componetInstance);
        if (noErr != status) {
            NSString *log = @"Failed to create a new audio component instance.";
            NSLog(@"%@", log);
            return;
        }

        // Enable input on element 1, the input element of RemoteIO.
        UInt32 flagOne = 1;
        AudioUnitSetProperty(strongSelf.componetInstance, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &flagOne, sizeof(flagOne));

        // 44.1 kHz, 16-bit signed integer, interleaved stereo PCM.
        AudioStreamBasicDescription desc = {0};
        desc.mSampleRate = 44100;
        desc.mFormatID = kAudioFormatLinearPCM;
        desc.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
        desc.mChannelsPerFrame = 2;
        desc.mFramesPerPacket = 1;
        desc.mBitsPerChannel = 16;
        desc.mBytesPerFrame = desc.mBitsPerChannel / 8 * desc.mChannelsPerFrame;
        desc.mBytesPerPacket = desc.mBytesPerFrame * desc.mFramesPerPacket;

        AURenderCallbackStruct cb;
        cb.inputProcRefCon = (__bridge void *)(strongSelf);
        cb.inputProc = handleInputBuffer;
        // The capture format is set on the output scope of the input element,
        // which is where converted microphone data leaves the unit.
        AudioUnitSetProperty(strongSelf.componetInstance, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &desc, sizeof(desc));
        AudioUnitSetProperty(strongSelf.componetInstance, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &cb, sizeof(cb));

        status = AudioUnitInitialize(strongSelf.componetInstance);
        if (noErr != status) {
            NSString *log = @"Failed to init audio unit.";
            NSLog(@"%@", log);
        }
        AudioOutputUnitStart(strongSelf.componetInstance);
        setSamplerate();
    };
    void (^noPermission)(void) = ^{
        NSString *log = @"No microphone permission.";
        NSLog(@"%@", log);
    };
    void (^requestPermission)(void) = ^{
        [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
            if (granted) {
                permissionGranted();
            } else {
                noPermission();
            }
        }];
    };

    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    switch (status) {
        case AVAuthorizationStatusAuthorized:
            permissionGranted();
            break;
        case AVAuthorizationStatusNotDetermined:
            requestPermission();
            break;
        case AVAuthorizationStatusDenied:
        case AVAuthorizationStatusRestricted:
        default:
            noPermission();
            break;
    }
}
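
// A minimal teardown sketch, not part of the original gist (it assumes
// componetInstance is the AudioComponentInstance property used above): stop
// capture and dispose of the audio unit when the controller goes away.
- (void)dealloc {
    [self.videoCamera stopCameraCapture];
    if (self.componetInstance) {
        AudioOutputUnitStop(self.componetInstance);
        AudioUnitUninitialize(self.componetInstance);
        AudioComponentInstanceDispose(self.componetInstance);
    }
}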
#pragma mark - <PLStreamingSendingBufferDelegate>

- (void)streamingSessionSendingBufferDidEmpty:(id)session {
    NSLog(@"Sending buffer empty");
}

- (void)streamingSessionSendingBufferDidFull:(id)session {
    NSLog(@"Sending buffer full");
}

#pragma mark - <PLStreamingSessionDelegate>

- (void)streamingSession:(PLStreamingSession *)session streamStateDidChange:(PLStreamState)state {
    // Every state except PLStreamStateError is delivered through this callback.
    NSString *log = [NSString stringWithFormat:@"Stream State: %s", stateNames[state]];
    NSLog(@"%@", log);

    if (PLStreamStateDisconnected == state) {
        [self.actionButton setTitle:@"Start" forState:UIControlStateNormal];
    }
}

- (void)streamingSession:(PLStreamingSession *)session didDisconnectWithError:(NSError *)error {
    // The PLStreamStateError state is delivered through this callback.
    NSString *log = [NSString stringWithFormat:@"Stream State: Error. %@", error];
    NSLog(@"%@", log);
    [self.actionButton setTitle:@"Start" forState:UIControlStateNormal];
}

- (void)streamingSession:(PLStreamingSession *)session streamStatusDidUpdate:(PLStreamStatus *)status {
    NSLog(@"%@", status);
}
#pragma mark - Action

- (IBAction)actionButtonPressed:(id)sender {
    self.actionButton.enabled = NO;

    switch (self.session.streamState) {
        case PLStreamStateConnected:
            [self.session stop];
            [self.actionButton setTitle:@"Start" forState:UIControlStateNormal];
            self.actionButton.enabled = YES;
            break;
        case PLStreamStateUnknow:
        case PLStreamStateDisconnected:
        case PLStreamStateError: {
            [self.session startWithCompleted:^(BOOL success) {
                if (success) {
                    NSString *log = @"Started streaming successfully.";
                    NSLog(@"%@", log);
                    [self.actionButton setTitle:@"Stop" forState:UIControlStateNormal];
                } else {
                    NSString *log = @"Failed to start streaming.";
                    NSLog(@"%@", log);
                    [self.actionButton setTitle:@"Start" forState:UIControlStateNormal];
                }
                self.actionButton.enabled = YES;
            }];
        }
            break;
        default:
            break;
    }
}

@end