#include "AndroidInterface.h"

// NOTE(review): the original angle-bracket include names were stripped when
// this file was mangled (four bare `#include` directives remained). The
// standard headers below cover the std:: names this translation unit uses;
// confirm against the pristine upstream file.
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "VideoCapturerInterfaceImpl.h"
#include "sdk/android/native_api/base/init.h"
#include "sdk/android/native_api/codecs/wrapper.h"
#include "sdk/android/native_api/jni/class_loader.h"
#include "sdk/android/native_api/jni/jvm.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
#include "sdk/android/native_api/video/video_source.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "pc/video_track_source_proxy.h"
#include "sdk/android/src/jni/android_network_monitor.h"
#include "api/video_track_source_proxy_factory.h"
#include "AndroidContext.h"
#include "media/engine/simulcast_encoder_adapter.h"

namespace tgcalls {

// No platform-specific audio configuration is needed on Android.
void AndroidInterface::configurePlatformAudio(int numChannels) {
}

// Decorates a platform encoder factory so that every encoder it produces is
// wrapped in a webrtc::SimulcastEncoderAdapter, which fans one encode request
// out to per-layer encoders when simulcast is negotiated. All capability
// queries are forwarded to the wrapped factory untouched.
class SimulcastVideoEncoderFactory : public webrtc::VideoEncoderFactory {
public:
    std::unique_ptr<webrtc::VideoEncoderFactory> main_factory;
    // NOTE(review): this member is never assigned or read in this file; its
    // element type was lost in the mangling and is reconstructed here —
    // confirm against upstream, or consider removing the field entirely.
    std::unique_ptr<webrtc::VideoEncoderFactory> simulcast_adapter;

    SimulcastVideoEncoderFactory(
        std::unique_ptr<webrtc::VideoEncoderFactory> main_factory
    ): main_factory(std::move(main_factory)) {
    }

    std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
        return main_factory->GetSupportedFormats();
    }

    std::vector<webrtc::SdpVideoFormat> GetImplementations() const override {
        return main_factory->GetImplementations();
    }

    std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override {
        return main_factory->GetEncoderSelector();
    }

    // The adapter borrows `main_factory` (raw pointer) to create the actual
    // per-layer encoders, so this factory must outlive every encoder it makes.
    std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(const webrtc::SdpVideoFormat& format) override {
        return std::make_unique<webrtc::SimulcastEncoderAdapter>(main_factory.get(), format);
    }

    CodecSupport QueryCodecSupport(
        const webrtc::SdpVideoFormat& format,
        absl::optional<std::string> scalability_mode) const override {
        return main_factory->QueryCodecSupport(format, scalability_mode);
    }
};

// Builds the encoder factory by instantiating the Java-side
// org.webrtc.DefaultVideoEncoderFactory over JNI (no shared EGL context,
// Intel VP8 encoder disabled, H264 high profile enabled), converting it to a
// native factory, and wrapping it for simulcast support.
std::unique_ptr<webrtc::VideoEncoderFactory> AndroidInterface::makeVideoEncoderFactory(std::shared_ptr<PlatformContext> platformContext, bool preferHardwareEncoding, bool isScreencast) {
    JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
    // AndroidContext *context = (AndroidContext *) platformContext.get();
    // jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;");
    // jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId);
    webrtc::ScopedJavaLocalRef<jclass> factory_class =
        webrtc::GetClass(env, "org/webrtc/DefaultVideoEncoderFactory");
    // "<init>" is the JNI name for a constructor.
    jmethodID factory_constructor = env->GetMethodID(
        factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;ZZ)V");
    webrtc::ScopedJavaLocalRef<jobject> factory_object(
        env, env->NewObject(factory_class.obj(), factory_constructor,
                            nullptr /* shared_context */,
                            false /* enable_intel_vp8_encoder */,
                            true /* enable_h264_high_profile */));
    // return webrtc::JavaToNativeVideoEncoderFactory(env, factory_object.obj());
    return std::make_unique<SimulcastVideoEncoderFactory>(
        webrtc::JavaToNativeVideoEncoderFactory(env, factory_object.obj()));
}

// Builds the decoder factory from the Java-side
// org.webrtc.DefaultVideoDecoderFactory (no shared EGL context) via JNI.
std::unique_ptr<webrtc::VideoDecoderFactory> AndroidInterface::makeVideoDecoderFactory(std::shared_ptr<PlatformContext> platformContext) {
    JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
    // AndroidContext *context = (AndroidContext *) platformContext.get();
    // jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;");
    // jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId);
    webrtc::ScopedJavaLocalRef<jclass> factory_class =
        webrtc::GetClass(env, "org/webrtc/DefaultVideoDecoderFactory");
    jmethodID factory_constructor = env->GetMethodID(
        factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;)V");
    webrtc::ScopedJavaLocalRef<jobject> factory_object(
        env, env->NewObject(factory_class.obj(), factory_constructor,
                            nullptr /* shared_context */));
    return webrtc::JavaToNativeVideoDecoderFactory(env, factory_object.obj());
}

// Source adaptation (resolution/fps) is handled on the Java side, so this is
// intentionally a no-op on Android.
void AndroidInterface::adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) {
}

// Creates (and caches in _source[0] for camera / _source[1] for screencast) a
// Java-backed video source, returning it behind a proxy that marshals calls
// onto the signaling/worker threads.
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) {
    JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
    _source[screencapture ? 1 : 0] = webrtc::CreateJavaVideoSource(env, signalingThread, false, false);
    return webrtc::CreateVideoTrackSourceProxy(signalingThread, workerThread, _source[screencapture ? 1 : 0].get());
}

// Returns true if the given codec is hardware-encodable on this device.
// The HardwareVideoEncoderFactory is created lazily over JNI on first call
// and cached. VP8 is always reported as supported because WebRTC provides a
// software VP8 encoder fallback.
bool AndroidInterface::supportsEncoding(const std::string &codecName, std::shared_ptr<PlatformContext> platformContext) {
    if (hardwareVideoEncoderFactory == nullptr) {
        JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
        // AndroidContext *context = (AndroidContext *) platformContext.get();
        // jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;");
        // jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId);
        webrtc::ScopedJavaLocalRef<jclass> factory_class =
            webrtc::GetClass(env, "org/webrtc/HardwareVideoEncoderFactory");
        jmethodID factory_constructor = env->GetMethodID(
            factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;ZZ)V");
        webrtc::ScopedJavaLocalRef<jobject> factory_object(
            env, env->NewObject(factory_class.obj(), factory_constructor,
                                nullptr, false, true));
        hardwareVideoEncoderFactory = webrtc::JavaToNativeVideoEncoderFactory(env, factory_object.obj());
    }
    auto formats = hardwareVideoEncoderFactory->GetSupportedFormats();
    for (const auto &format : formats) {
        if (format.name == codecName) {
            return true;
        }
    }
    return codecName == cricket::kVp8CodecName;
}

// Wraps the previously created Java video source (screen vs. camera selected
// by deviceId) in a VideoCapturerInterfaceImpl. makeVideoSource() must have
// been called for the matching source slot first.
std::unique_ptr<VideoCapturerInterface> AndroidInterface::makeVideoCapturer(
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source,
    std::string deviceId,
    std::function<void(VideoState)> stateUpdated,
    std::function<void(PlatformCaptureInfo)> captureInfoUpdated,
    std::shared_ptr<PlatformContext> platformContext,
    std::pair<int, int> &outResolution
) {
    return std::make_unique<VideoCapturerInterfaceImpl>(_source[deviceId == "screen" ? 1 : 0], deviceId, stateUpdated, platformContext);
}

// Network reachability changes are observed through WebRTC's Android-specific
// network monitor.
std::unique_ptr<rtc::NetworkMonitorFactory> AndroidInterface::createNetworkMonitorFactory() {
    return std::make_unique<webrtc::jni::AndroidNetworkMonitorFactory>();
}

// Platform entry point used by the cross-platform tgcalls core.
std::unique_ptr<PlatformInterface> CreatePlatformInterface() {
    return std::make_unique<AndroidInterface>();
}

} // namespace tgcalls