
Commit 2dde0da

Author: jeffgaogao
docs(TcgSdk): release 3.9.0

1 parent cdda45d · commit 2dde0da
21 files changed: +1259 −11 lines

Demo/TCRDemo/TCGDemo/TCGDemoGamePlayVC.h
Lines changed: 1 addition & 0 deletions

@@ -19,6 +19,7 @@ typedef void(^tGameStopBlk)(void);
 
 - (instancetype)initWithPlay:(TcrSession *)play remoteSession:(NSString *)remoteSession;
 - (instancetype)initWithPlay:(TcrSession *)play remoteSession:(NSString *)remoteSession loadingView:(UIView *)loadingView;
+- (instancetype)initWithPlay:(TcrSession *)play remoteSession:(NSString *)remoteSession loadingView:(UIView *)loadingView captureWidth:(int)captureWidth captureHeight:(int)captureHeight captureFps:(int)captureFps;
 - (instancetype)initWithPlay:(TcrSession *)play experienceCode:(NSDictionary *)params;
 
 @end
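For orientation, a minimal call-site sketch for the new initializer. The session, remoteSession and loadingView values are placeholders, and 720x1280 at 30 fps is only an illustrative portrait profile, not something mandated by the header:

// Hypothetical call site for the new initializer with explicit capture parameters.
TCGDemoGamePlayVC *playVC = [[TCGDemoGamePlayVC alloc] initWithPlay:session
                                                      remoteSession:remoteSession
                                                        loadingView:loadingView
                                                       captureWidth:720
                                                      captureHeight:1280
                                                         captureFps:30];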

Demo/TCRDemo/TCGDemo/TCGDemoGamePlayVC.m
Lines changed: 81 additions & 4 deletions

@@ -16,6 +16,7 @@
 #import "AudioQueuePlay.h"
 #import "TCGDemoAudioCapturor.h"
 #import <CoreMotion/CoreMotion.h>
+#import "video_capture/TCGCameraVideoCapturer.h"
 
 @interface TCGDemoGamePlayVC () <TcrSessionObserver, TCGDemoTextFieldDelegate, CustomDataChannelObserver, TCGDemoSettingViewDelegate, TCRLogDelegate,
     VideoSink, AudioSink, TcrRenderViewObserver, TCGDemoMultiSettingViewDelegate, UIGestureRecognizerDelegate>
@@ -50,8 +51,12 @@ @interface TCGDemoGamePlayVC () <TcrSessionObserver, TCGDemoTextFieldDelegate, C
 @property (nonatomic, assign) BOOL isFirstRender;
 @property (nonatomic, assign) BOOL isMobile;
 @property (strong, nonatomic) CMMotionManager *motionManager;
-
-
+@property (nonatomic, strong) TCGCameraVideoCapturer *videoCapturer;
+@property (nonatomic, assign) BOOL isFrontCamera;
+@property (nonatomic, assign) int captureWidth;
+@property (nonatomic, assign) int captureHeight;
+@property (nonatomic, assign) int captureFps;
+@property (nonatomic, assign) BOOL enableSendCustomVideo;
 @end
 
 @implementation TCGDemoGamePlayVC
@@ -67,6 +72,25 @@ - (instancetype)initWithPlay:(TcrSession *)play remoteSession:(NSString *)remote
         self.isFirstRender = NO;
         self.remoteSession = remoteSession;
         self.loadingView = (TCGDemoLoadingView *)loadingView;
+        self.captureWidth = 720;
+        self.captureHeight = 1280;
+        self.captureFps = 20;
+        [self.session setTcrSessionObserver:self];
+    }
+    return self;
+}
+
+- (instancetype)initWithPlay:(TcrSession *)play remoteSession:(NSString *)remoteSession loadingView:(UIView *)loadingView captureWidth:(int)captureWidth captureHeight:(int)captureHeight captureFps:(int)captureFps {
+    self = [super init];
+    if (self) {
+        self.session = play;
+        self.isFirstRender = NO;
+        self.remoteSession = remoteSession;
+        self.loadingView = (TCGDemoLoadingView *)loadingView;
+        self.captureWidth = captureWidth;
+        self.captureHeight = captureHeight;
+        self.captureFps = captureFps;
+        self.enableSendCustomVideo = true;
         [self.session setTcrSessionObserver:self];
     }
     return self;
@@ -349,6 +373,7 @@ - (void)stopGame {
     [self.debugLabTimer invalidate];
     [self.renderView removeFromSuperview];
     [self.audioPlayer stop];
+    [self.videoCapturer stopCapture];
     [self.session releaseSession];
     if (self.gameStopBlk) {
         self.gameStopBlk();
@@ -460,11 +485,25 @@ - (void)onEnableLocalAudio:(BOOL)enable {
 }
 
 - (void)onEnableLocalVideo:(BOOL)enable {
+    if (_enableSendCustomVideo) {
+        if (enable) {
+            AVCaptureDevice *device = [self selectDevice];
+            AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
+            if (_videoCapturer == nil) {
+                _videoCapturer = [[TCGCameraVideoCapturer alloc] initWithTcrSession:self.session];
+            }
+            [_videoCapturer startCaptureWithDevice:device format:format fps:_captureFps];
+
+        } else {
+            [_videoCapturer stopCapture];
+        }
+    }
+
     [self.session setEnableLocalVideo:enable];
 }
 
 - (void)onSwitchCamera:(BOOL)isFrontCamera {
-    [self.session setLocalVideoProfile:1280 height:720 fps:30 minBitrate:1000 maxBitrate:5000 isFrontCamera:isFrontCamera];
+    [self.session setLocalVideoProfile:_captureWidth height:_captureHeight fps:_captureFps minBitrate:1000 maxBitrate:5000 isFrontCamera:isFrontCamera];
 }
 
 - (void)onRotateView {
@@ -583,6 +622,7 @@ - (void)onEvent:(TcrEvent)event eventData:(id)eventData {
             break;
         case CLIENT_STATS:
             info = (NSDictionary *)eventData;
+            NSLog(@"ApiTest CLIENT_STATS: %@", info);
             [self updateDebugInfo:eventData];
             break;
         case CLIENT_IDLE:
@@ -605,8 +645,9 @@
             if ([status isEqualToString:@"close"]) {
                 [self onEnableLocalVideo:false];
             } else {
+                _isFrontCamera = [status isEqualToString:@"open_front"];
                 [self onEnableLocalVideo:true];
-                [self.session setLocalVideoProfile:720 height:1280 fps:20 minBitrate:1000 maxBitrate:15000 isFrontCamera:[status isEqualToString:@"open_front"] ? YES : NO];
+                [self.session setLocalVideoProfile:_captureWidth height:_captureHeight fps:_captureFps minBitrate:1000 maxBitrate:15000 isFrontCamera:_isFrontCamera];
             }
             break;
     }
@@ -726,6 +767,42 @@ - (void)onsetMicMute:(nonnull NSString *)userid enable:(BOOL)index {
         NSLog(@"禁言结果:%d", retCode);
     }];
 }
+
+- (AVCaptureDevice *)selectDevice {
+    AVCaptureDevicePosition position = _isFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+    NSArray<AVCaptureDevice *> *captureDevices;
+    if (@available(iOS 10.0, *)) {
+        AVCaptureDeviceDiscoverySession *session =
+            [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo
+                                                                    position:AVCaptureDevicePositionUnspecified];
+        captureDevices = session.devices;
+    } else {
+        captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+    }
+    for (AVCaptureDevice *device in captureDevices) {
+        if (device.position == position) {
+            return device;
+        }
+    }
+    return captureDevices[0];
+}
+- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
+    NSArray<AVCaptureDeviceFormat *> *formats = device.formats;
+    AVCaptureDeviceFormat *selectedFormat = nil;
+    int currentDiff = INT_MAX;
+    for (AVCaptureDeviceFormat *format in formats) {
+        CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+        FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+        int diff = abs(_captureWidth - dimension.width) + abs(_captureHeight - dimension.height);
+        if (diff <= currentDiff) {
+            selectedFormat = format;
+            currentDiff = diff;
+        } else if (_videoCapturer != nil && diff == currentDiff && pixelFormat == [_videoCapturer preferredOutputPixelFormat]) {
+            selectedFormat = format;
+        }
+    }
+    return selectedFormat;
+}
 
 #pragma mark - 手势操作
 - (void)addEdgeSwipeGestures {
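The demo calls the fire-and-forget startCaptureWithDevice:format:fps: variant in the enable path above. If start failures should be surfaced, a hedged sketch using the completion-handler overload declared in TCGCameraVideoCapturer.h (shown later in this commit) could look like this; the variables match the ones already in onEnableLocalVideo::

// Hypothetical error-reporting variant of the enable path above.
[_videoCapturer startCaptureWithDevice:device
                                format:format
                                   fps:_captureFps
                     completionHandler:^(NSError *_Nullable error) {
    if (error != nil) {
        NSLog(@"Custom camera capture failed to start: %@", error);
    }
}];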

Demo/TCRDemo/TCGDemo/experience/TCGDemoExperienceVC.m
Lines changed: 25 additions & 6 deletions

@@ -57,6 +57,11 @@ @interface TCGDemoExperienceVC()<TcrSessionObserver, TCGDemoInputDelegate, TCRAu
     NSString *_userId;
     NSString *_experienceCode;
     BOOL _enableCustomAudioCapture;
+    BOOL _enableCustomVideoCapture;
+    int _captureWidth;
+    int _captureHeight;
+    int _captureFps;
+    BOOL _enableSendCustomVideo;
     NSNumber* _idleThreshold;
 
     UIButton *_startBtn;
@@ -168,6 +173,7 @@ - (void)initSubviews {
     _loadingView = [[TCGDemoLoadingView alloc] initWithFrame:self.view.bounds process:0];
     _loadingView.hidden = YES;
     _enableCustomAudioCapture = false;
+    _enableCustomVideoCapture = true;
 }
 
 - (void)initAdvanceInput {
@@ -523,9 +529,17 @@ - (void)stopExperienceSession {
 }
 
 - (void)gotoGameplayVC:(NSString *)remoteSession {
-    TCGDemoGamePlayVC *subVC = [[TCGDemoGamePlayVC alloc] initWithPlay:self.session
-                                                         remoteSession:remoteSession
-                                                           loadingView:_loadingView];
+    TCGDemoGamePlayVC *subVC;
+    if (_enableCustomVideoCapture) {
+        subVC = [[TCGDemoGamePlayVC alloc] initWithPlay:self.session
+                                          remoteSession:remoteSession
+                                            loadingView:_loadingView captureWidth:_captureWidth captureHeight:_captureHeight captureFps:_captureFps];
+    } else {
+        subVC = [[TCGDemoGamePlayVC alloc] initWithPlay:self.session
+                                          remoteSession:remoteSession
+                                            loadingView:_loadingView];
+    }
+
     [self addChildViewController:subVC];
     subVC.view.frame = self.view.bounds;
     [self.view insertSubview:subVC.view belowSubview:_loadingView];
@@ -572,10 +586,15 @@ - (void)createGamePlayer {
         NSInteger channelCount = 1;
         [[TCGDemoAudioCapturor shared]configureWithSampleRate:sampleRate channelCount:channelCount dumpAudio:false];
         [tcrConfig setValue:@{@"sampleRate":@(sampleRate), @"useStereoInput":@(channelCount == 2)} forKey:@"enableCustomAudioCapture"];
-        self.session = [[TcrSession alloc] initWithParams:tcrConfig andDelegate:self];
-    } else {
-        self.session = [[TcrSession alloc] initWithParams:tcrConfig andDelegate:self];
     }
+    if (_enableCustomVideoCapture) {
+        _captureWidth = 720;
+        _captureHeight = 1280;
+        _captureFps = 30;
+        [tcrConfig setValue:@{@"captureWidth":@(_captureWidth), @"captureHeight":@(_captureHeight), @"captureFps":@(_captureFps)} forKey:@"enableCustomVideoCapture"];
+    }
+
+    self.session = [[TcrSession alloc] initWithParams:tcrConfig andDelegate:self];
 }
 
 - (void)stopGame {
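Pulling the pieces of this file together, a hedged sketch of the wiring order for custom video capture in the demo. The identifiers match the diff, but the method structure (createGamePlayer vs. gotoGameplayVC:) is compressed into one sequence for illustration:

// 1. Advertise custom capture to the SDK before creating the session.
_captureWidth = 720; _captureHeight = 1280; _captureFps = 30;
[tcrConfig setValue:@{@"captureWidth": @(_captureWidth),
                      @"captureHeight": @(_captureHeight),
                      @"captureFps": @(_captureFps)}
             forKey:@"enableCustomVideoCapture"];
self.session = [[TcrSession alloc] initWithParams:tcrConfig andDelegate:self];

// 2. Hand the same parameters to the gameplay VC so its capturer matches the session config.
TCGDemoGamePlayVC *subVC = [[TCGDemoGamePlayVC alloc] initWithPlay:self.session
                                                     remoteSession:remoteSession
                                                       loadingView:_loadingView
                                                      captureWidth:_captureWidth
                                                     captureHeight:_captureHeight
                                                        captureFps:_captureFps];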
AVCaptureSession+DevicePosition.h (new file; the full path is not shown in this view, the name is inferred from the category it declares and the #import in the companion .m below)
Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <CoreMedia/CoreMedia.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface AVCaptureSession (DevicePosition)
+
+// Check the image's EXIF for the camera the image came from.
++ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
AVCaptureSession+DevicePosition.m (new file; the full path is not shown in this view, the name is inferred from the header it imports and implements)
Lines changed: 51 additions & 0 deletions

@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "AVCaptureSession+DevicePosition.h"
+
+BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) {
+  return CFStringFindWithOptions(theString,
+                                 stringToFind,
+                                 CFRangeMake(0, CFStringGetLength(theString)),
+                                 kCFCompareCaseInsensitive,
+                                 nil);
+}
+
+@implementation AVCaptureSession (DevicePosition)
+
++ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+  // Check the image's EXIF for the camera the image came from.
+  AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionUnspecified;
+  CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
+      kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
+  if (attachments) {
+    int size = CFDictionaryGetCount(attachments);
+    if (size > 0) {
+      CFDictionaryRef cfExifDictVal = nil;
+      if (CFDictionaryGetValueIfPresent(
+              attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) {
+        CFStringRef cfLensModelStrVal;
+        if (CFDictionaryGetValueIfPresent(cfExifDictVal,
+                                          (const void *)CFSTR("LensModel"),
+                                          (const void **)&cfLensModelStrVal)) {
+          if (CFStringContainsString(cfLensModelStrVal, CFSTR("front"))) {
+            cameraPosition = AVCaptureDevicePositionFront;
+          } else if (CFStringContainsString(cfLensModelStrVal, CFSTR("back"))) {
+            cameraPosition = AVCaptureDevicePositionBack;
+          }
+        }
+      }
+    }
+    CFRelease(attachments);
+  }
+  return cameraPosition;
+}
+
+@end
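For context, a hedged sketch of where a category like this is typically consumed: inside a standard AVCaptureVideoDataOutputSampleBufferDelegate callback. The delegate method below is stock AVFoundation; the capturer's internal delegate implementation is not part of this commit view:

- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
    // Recover which camera produced this frame from the buffer's EXIF attachment,
    // for cases where the source device position is not otherwise known.
    AVCaptureDevicePosition position =
        [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
    BOOL usingFrontCamera = (position == AVCaptureDevicePositionFront);
    // ... forward the frame plus the mirroring/rotation hint to the video pipeline ...
    (void)usingFrontCamera;
}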
TCGCameraVideoCapturer.h (new file; imported above as "video_capture/TCGCameraVideoCapturer.h", full path not shown in this view)
Lines changed: 57 additions & 0 deletions

@@ -0,0 +1,57 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+
+NS_ASSUME_NONNULL_BEGIN
+
+// Camera capture that implements RTCVideoCapturer. Delivers frames to a
+// RTCVideoCapturerDelegate (usually RTCVideoSource).
+NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.")
+@interface TCGCameraVideoCapturer: NSObject
+
+// Capture session that is used for capturing. Valid from initialization to dealloc.
+@property(readonly, nonatomic) AVCaptureSession *captureSession;
+
+// Returns list of available capture devices that support video capture.
++ (NSArray<AVCaptureDevice *> *)captureDevices;
+// Returns list of formats that are supported by this class for this device.
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
+
+- (instancetype)initWithTcrSession:(TcrSession*)tcrSession;
+
+- (instancetype)initWithTcrSession:(TcrSession*)tcrSession captureSession:(AVCaptureSession *)captureSession;
+
+// Returns the most efficient supported output pixel format for this capturer.
+- (FourCharCode)preferredOutputPixelFormat;
+
+// Starts the capture session asynchronously and notifies callback on completion.
+// The device will capture video in the format given in the `format` parameter. If the pixel format
+// in `format` is supported by the WebRTC pipeline, the same pixel format will be used for the
+// output. Otherwise, the format returned by `preferredOutputPixelFormat` will be used.
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+                        format:(AVCaptureDeviceFormat *)format
+                           fps:(NSInteger)fps
+             completionHandler:(nullable void (^)(NSError *_Nullable))completionHandler;
+// Stops the capture session asynchronously and notifies callback on completion.
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
+
+// Starts the capture session asynchronously.
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+                        format:(AVCaptureDeviceFormat *)format
+                           fps:(NSInteger)fps;
+// Stops the capture session asynchronously.
+- (void)stopCapture;
+
+@end
+
+NS_ASSUME_NONNULL_END
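Taken together, a hedged end-to-end sketch of the capturer's public API as declared above. The tcrSession value is assumed to be an already-created TcrSession, and device/format selection is simplified to firstObject rather than the demo's selection helpers:

// Hypothetical standalone usage of TCGCameraVideoCapturer.
TCGCameraVideoCapturer *capturer =
    [[TCGCameraVideoCapturer alloc] initWithTcrSession:tcrSession];
AVCaptureDevice *device = [TCGCameraVideoCapturer captureDevices].firstObject;
AVCaptureDeviceFormat *format =
    [TCGCameraVideoCapturer supportedFormatsForDevice:device].firstObject;

[capturer startCaptureWithDevice:device
                          format:format
                             fps:30
               completionHandler:^(NSError *_Nullable error) {
    if (error != nil) {
        NSLog(@"startCapture failed: %@", error);
    }
}];

// ... later, e.g. when local video is disabled or the session is released:
[capturer stopCaptureWithCompletionHandler:^{
    NSLog(@"Camera capture stopped");
}];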
