// Set up Mixer Slot Configuration
IVSMixerSlotConfiguration *customSlot = [IVSMixerSlotConfiguration new];
customSlot.size = config.video.size;
customSlot.position = CGPointMake(0.0, 0.0);
customSlot.preferredAudioInput = IVSDeviceTypeUserAudio;
customSlot.preferredVideoInput = IVSDeviceTypeUserImage;
NSError *customSlotError = nil;
NSString * const customSlotName = @"custom-slot";
[customSlot setName:customSlotName error:&customSlotError];
// Add this slot to the broadcast configuration created above
config.mixer.slots = @[customSlot];
Broadcast session
Finally, set up the broadcast session with the configuration above.
NSError *broadcastSessionError = nil;
// Pass nil for the device descriptors parameter so application logic keeps control of the camera device
IVSBroadcastSession *broadcastSession = [[IVSBroadcastSession alloc] initWithConfiguration:config descriptors:nil delegate:nil error:&broadcastSessionError];
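// Not shown in the original walkthrough: it is worth verifying that session creation
// succeeded before attaching devices.
if (broadcastSessionError != nil) {
    NSLog(@"Failed to create IVSBroadcastSession: %@", broadcastSessionError);
}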
// Attach custom audio input source
id customAudioSource = [broadcastSession createAudioSourceWithName:@"custom-audio"];
[broadcastSession attachDevice:customAudioSource toSlotWithName:customSlotName onComplete:nil]; // <connect to your onComplete callback function>
self.customAudioSource = customAudioSource;
// Attach custom image input source
id customImageSource = [broadcastSession createImageSourceWithName:@"custom-image"];
[broadcastSession attachDevice:customImageSource toSlotWithName:customSlotName onComplete:nil]; // <connect to your onComplete callback function>
self.customImageSource = customImageSource;
self.broadcastSession = broadcastSession;
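As a usage sketch that goes beyond the walkthrough above: the attach calls accept a completion block in place of nil, and once both custom sources are attached the session can be started against your IVS ingest endpoint with startWithURL:streamKey:error:. The ingest URL and stream key below are placeholders, not values from the original article.
// Example onComplete handler (replaces the nil passed above)
[broadcastSession attachDevice:customImageSource toSlotWithName:customSlotName onComplete:^(NSError * _Nullable error) {
    if (error) {
        NSLog(@"Failed to attach custom image source: %@", error);
    }
}];
// Start streaming; replace the placeholder ingest endpoint and stream key with your own
NSURL *ingestURL = [NSURL URLWithString:@"rtmps://<your-ingest-endpoint>:443/app/"];
NSError *startError = nil;
[broadcastSession startWithURL:ingestURL streamKey:@"<your-stream-key>" error:&startError];
if (startError != nil) {
    NSLog(@"Failed to start broadcast: %@", startError);
}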
// BEVideoCapture delegate callback
- (void)videoCapture:(id<BEVideoSourceProtocol>)source didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer withRotation:(int)rotation {
// GL context check removed for brevity
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
double timeStamp = (double)sampleTime.value/sampleTime.timescale;
// Locking that protects the BytePlus Effect SDK for thread safety (e.g. NSRecursiveLock) removed for brevity
[self processWithCVPixelBuffer:pixelBuffer rotation:rotation timeStamp:timeStamp];
}
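A minimal sketch of the locking pattern elided above, assuming a hypothetical NSRecursiveLock property named effectLock that is not defined in the original code:
// Hypothetical property: @property (nonatomic, strong) NSRecursiveLock *effectLock;
[self.effectLock lock];
[self processWithCVPixelBuffer:pixelBuffer rotation:rotation timeStamp:timeStamp];
[self.effectLock unlock];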
Processing camera buffers with the BytePlus Effect SDK
The actual image processing is performed in the processWithCVPixelBuffer: method shown below.
Setting up the input and output image buffers
- (void)processWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer rotation:(int)rotation timeStamp:(double)timeStamp {
BEPixelBufferInfo *pixelBufferInfo = [self.imageUtils getCVPixelBufferInfo:pixelBuffer];
// The BytePlus Effect SDK requires BGRA format
if (pixelBufferInfo.format != BE_BGRA) {
pixelBuffer = [self.imageUtils transforCVPixelBufferToCVPixelBuffer:pixelBuffer outputFormat:BE_BGRA];
}
if (rotation != 0) {
// The texture passed to the BytePlus Effect SDK must be upright, so rotate the buffer before calling the SDK
pixelBuffer = [self.imageUtils rotateCVPixelBuffer:pixelBuffer rotation:rotation];
}
// Set up input buffer
id<BEGLTexture> texture = [self.imageUtils transforCVPixelBufferToTexture:pixelBuffer];
// Set up output buffer
id<BEGLTexture> outTexture = [self.imageUtils getOutputPixelBufferGLTextureWithWidth:texture.width height:texture.height format:BE_BGRA];
// self.manager is the BEEffectManager created during BytePlus Effect SDK initialization
// For demonstration purposes the rotation passed to the SDK is fixed at BEF_AI_CLOCKWISE_ROTATE_0
int ret = [self.manager processTexture:texture.texture outputTexture:outTexture.texture width:texture.width height:texture.height rotate:BEF_AI_CLOCKWISE_ROTATE_0 timeStamp:timeStamp];
if (ret != BEF_RESULT_SUC) {
// Log unsuccessful image processing and fall back to the unprocessed input texture
outTexture = texture;
} else {
// Log successful image processing
}